Columns: issue (dict), pr (dict), pr_details (dict)
{ "body": "Older version of start-stop-daemon don't support --status so the new init.d script throws an error on Ubuntu 10.04\n\n```\nservice elasticsearch restart\n * Starting ElasticSearch Server start-stop-daemon: unrecognized option '--status'\nTry 'start-stop-daemon --help' for more information.\nstart-stop-daemon: unrecognized option '--status'\nTry 'start-stop-daemon --help' for more information.\n```\n\nOnce started running service elasticsearch stop doesn't stop the service it has to be killed manually. \n", "comments": [ { "body": "Valid point. I'll see how to work around that. If I got it right 10.04 is an LTS release, so it makes sense to work on that, right?\n", "created_at": "2013-08-07T07:22:28Z" }, { "body": "Yep 10.04 is LTS it's supported until 2015.\n", "created_at": "2013-08-07T08:00:58Z" }, { "body": "hey\n\ncreated a PR for this. It would be a great help if you could test the init script in your ubuntu installation and see if it works. Just replace the current one with this:\n\nhttps://raw.github.com/spinscale/elasticsearch/issue-3452-debian-init-script/src/deb/init.d/elasticsearch\n\nThanks a lot!\n", "created_at": "2013-08-08T12:06:57Z" }, { "body": "Yep that works great!\n", "created_at": "2013-08-08T12:15:04Z" }, { "body": "Hi,\n\nworks also great on Debian 6 :)\n", "created_at": "2013-08-11T12:50:55Z" }, { "body": "fyi this is not in `0.90.3.2`, downgrading to `0.90.2` helped.\n", "created_at": "2013-08-26T13:51:56Z" } ], "number": 3452, "title": "Older version of start-stop-daemon don't support --status" }
{ "body": "By making use of the lsb provided functions, one does not depend on the start-stop daemon version to test if elasticsearch is running.\nThis ensures, that the init script works on debian wheezy, squeeze, current ubuntu and LTS versions.\n\nCloses #3452\n", "number": 3462, "review_comments": [], "title": "Fix debian init script to not depend on new start-stop-daemon" }
{ "commits": [ { "message": "Fix debian init script to not depend on new start-stop-daemon\n\nBy making use of the lsb provided functions, one does not depend on the start-stop daemon version to test if elasticsearch is running.\nThis ensures, that the init script works on debian wheezy, squeeze, current ubuntu and LTS versions.\n\nCloses #3452" } ], "files": [ { "diff": "@@ -19,8 +19,6 @@\n # Description: Starts elasticsearch using start-stop-daemon\n ### END INIT INFO\n \n-set -e\n-\n PATH=/bin:/usr/bin:/sbin:/usr/sbin\n NAME=elasticsearch\n DESC=\"ElasticSearch Server\"\n@@ -113,7 +111,6 @@ export ES_JAVA_OPTS\n test -x $DAEMON || exit 0\n \n checkJava() {\n-\tset +e\n \tif [ -x \"$JAVA_HOME/bin/java\" ]; then\n \t\tJAVA=\"$JAVA_HOME/bin/java\"\n \telse\n@@ -124,7 +121,6 @@ checkJava() {\n \t\techo \"Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME\"\n \t\texit 1\n \tfi\n-\tset -e\n }\n \n case \"$1\" in\n@@ -138,43 +134,32 @@ case \"$1\" in\n \n \tlog_daemon_msg \"Starting $DESC\"\n \n-\tset +e\n-\tstart-stop-daemon --status --pidfile \"$PID_FILE\" >/dev/null\n-\tif [ \"$?\" != \"0\" ]; then\n-\t\t# Prepare environment\n-\t\tmkdir -p \"$LOG_DIR\" \"$DATA_DIR\" \"$WORK_DIR\" && chown \"$ES_USER\":\"$ES_GROUP\" \"$LOG_DIR\" \"$DATA_DIR\" \"$WORK_DIR\"\n-\t\ttouch \"$PID_FILE\" && chown \"$ES_USER\":\"$ES_GROUP\" \"$PID_FILE\"\n+\tpid=$( pidofproc -p $PID_FILE elasticsearch)\n+\tif [ -n \"$pid\" ] ; then\n+\t\tlog_begin_msg \"Already running.\"\n+\t\tlog_end_msg 0\n+\t\texit 0\n+\tfi\n \n-\t\tif [ -n \"$MAX_OPEN_FILES\" ]; then\n-\t\t\tulimit -n $MAX_OPEN_FILES\n-\t\tfi\n+\t# Prepare environment\n+\tmkdir -p \"$LOG_DIR\" \"$DATA_DIR\" \"$WORK_DIR\" && chown \"$ES_USER\":\"$ES_GROUP\" \"$LOG_DIR\" \"$DATA_DIR\" \"$WORK_DIR\"\n+\ttouch \"$PID_FILE\" && chown \"$ES_USER\":\"$ES_GROUP\" \"$PID_FILE\"\n \n-\t\tif [ -n \"$MAX_LOCKED_MEMORY\" ]; then\n-\t\t\tulimit -l $MAX_LOCKED_MEMORY\n-\t\tfi\n-\n-\t\t# Start Daemon\n-\t\tstart-stop-daemon --start -b --user \"$ES_USER\" -c \"$ES_USER\" --pidfile \"$PID_FILE\" --exec $DAEMON -- $DAEMON_OPTS\n+\tif [ -n \"$MAX_OPEN_FILES\" ]; then\n+\t\tulimit -n $MAX_OPEN_FILES\n+\tfi\n \n-\t\tsleep 1\n-\t\tstart-stop-daemon --status --pidfile \"$PID_FILE\" >/dev/null\n-\t\tif [ \"$?\" != \"0\" ]; then\n-\t\t\tif [ -f \"$PID_FILE\" ]; then\n-\t\t\t\trm -f \"$PID_FILE\"\n-\t\t\tfi\n-\t\t\tlog_end_msg 1\n-\t\telse\n-\t\t\tlog_end_msg 0\n-\t\tfi\n-\telse\n-\t log_progress_msg \"(already running)\"\n-\t log_end_msg 0\n+\tif [ -n \"$MAX_LOCKED_MEMORY\" ]; then\n+\t\tulimit -l $MAX_LOCKED_MEMORY\n \tfi\n+\n+\t# Start Daemon\n+\tstart-stop-daemon --start -b --user \"$ES_USER\" -c \"$ES_USER\" --pidfile \"$PID_FILE\" --exec $DAEMON -- $DAEMON_OPTS\n+\tlog_end_msg $?\n \t;;\t\t\n stop)\n \tlog_daemon_msg \"Stopping $DESC\"\n \n-\tset +e\n \tif [ -f \"$PID_FILE\" ]; then \n \t\tstart-stop-daemon --stop --pidfile \"$PID_FILE\" \\\n \t\t\t--user \"$ES_USER\" \\\n@@ -191,23 +176,9 @@ case \"$1\" in\n \t\tlog_progress_msg \"(not running)\"\n \tfi\n \tlog_end_msg 0\n-\tset -e\n \t;;\n status)\n-\tset +e\n-\tstart-stop-daemon --status --pidfile \"$PID_FILE\" >/dev/null 2>&1\n-\tif [ \"$?\" != \"0\" ]; then\n-\t\tif [ -f \"$PID_FILE\" ]; then\n-\t\t log_success_msg \"$DESC is not running, but pid file exists.\"\n-\t\t\texit 1\n-\t\telse\n-\t\t log_success_msg \"$DESC is not running.\"\n-\t\t\texit 3\n-\t\tfi\n-\telse\n-\t\tlog_success_msg \"$DESC is running with pid `cat $PID_FILE`\"\n-\tfi\n-\tset -e\n+\tstatus_of_proc -p $PID_FILE 
elasticsearch elasticsearch && exit 0 || exit $?\n ;;\n restart|force-reload)\n \tif [ -f \"$PID_FILE\" ]; then", "filename": "src/deb/init.d/elasticsearch", "status": "modified" } ] }
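The pull request above replaces the `start-stop-daemon --status` probe with the LSB helpers `pidofproc` and `status_of_proc` from `/lib/lsb/init-functions`. For readers who have not used those helpers, here is a minimal sketch of the same pattern outside the full init script; it assumes the `lsb-base` package is installed (it is on Debian and on Ubuntu, including 10.04), and the pid file path and daemon name are illustrative.

```sh
#!/bin/sh
# Minimal sketch of the LSB-based status check used in the fixed init script.
# Assumes /lib/lsb/init-functions exists (lsb-base); paths and names are illustrative.
. /lib/lsb/init-functions

PID_FILE=/var/run/elasticsearch.pid
NAME=elasticsearch

case "$1" in
start)
    # pidofproc prints the pid (and returns 0) only when the process recorded in
    # PID_FILE is alive, so no --status support is needed in start-stop-daemon.
    pid=$(pidofproc -p "$PID_FILE" "$NAME")
    if [ -n "$pid" ]; then
        log_success_msg "$NAME is already running (pid $pid)"
        exit 0
    fi
    # ... start the daemon with start-stop-daemon --start here ...
    ;;
status)
    # status_of_proc prints a standard status line and exits with the LSB codes
    # (0 running, 1 dead but pid file exists, 3 not running).
    status_of_proc -p "$PID_FILE" "$NAME" "$NAME" && exit 0 || exit $?
    ;;
esac
```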
{ "body": "I had an instance running on a server, and the PID file had a 5-digit process ID in it. I did a restart, and the new process ID was 4-digits, but the file sitll had the 5th digit from the prior process.\n\nWhen writing the pid file, it should truncate it first if it already exists.\n\nRunning elasticsearch 0.90.0 on FreeBSD.\n\nThanks!\n", "comments": [ { "body": "Hey,\n\ncan you please provide more information. how you did this? I just tried the following (under Mac OS):\n\n```\nbin/elasticsearch -p /tmp/pid\nsleep 10 && cat /tmp/pid\n36756\nbin/elasticsearch -p /tmp/pid\nsleep 10 && cat /tmp/pid\n36792\n```\n\nIn my case the truncation works flawlessly (there is no check in elasticsearch if the pid file already exists). Anything you did different with the exception of your operating system? I am wondering if the sigar library is completely supported under freebsd, but judging from the official documentation it looks like it.\n", "created_at": "2013-08-02T07:05:02Z" }, { "body": "Your example shows the PID both of length 5.\n\nTry this:\n\necho aaaaaaaaaa > /tmp/pid\n\nthen run your test.\n\nHere is what it looks like to me:\n\n[root@logger]# /usr/local/etc/rc.d/elasticsearch start\nStarting elasticsearch.\n[root@logger]# cat /var/run/elasticsearch.pid\n40757[root@logger]#\n[root@logger]# /usr/local/etc/rc.d/elasticsearch stop\nStopping elasticsearch.\n[root@logger]# echo aaaaaaaaaa > /var/run/elasticsearch.pid\n[root@logger]# cat /var/run/elasticsearch.pid\naaaaaaaaaa\n[root@logger]# /usr/local/etc/rc.d/elasticsearch start\nStarting elasticsearch.\n[root@logger]# cat /var/run/elasticsearch.pid\n75686aaaaa\n\nBasically, if the pid assigned to elastic search is 5 digits long, and then\nafter a restart, the PID becomes 3 or 4 digits long, whatever was leftover\nwill still be there.\n\nSo if the first PID is 12345 and the second pid is 4321, what will be in\nthe file is 43215.\n\nOn FreeBSD, the PIDs are not necessarily monotonically increasing, so this\nhas actually happened to me, and is not just a theoretical problem. I'm not\nso familiar with Java, but in C or Perl, I'd add the O_TRUNC flag to the\nfile open.\n\nThanks!\n\nOn Fri, Aug 2, 2013 at 3:05 AM, Alexander Reelsen\nnotifications@github.comwrote:\n\n> Hey,\n> \n> can you please provide more information. how you did this? I just tried\n> the following (under Mac OS):\n> \n> bin/elasticsearch -p /tmp/pid\n> sleep 10 && cat /tmp/pid\n> 36756\n> bin/elasticsearch -p /tmp/pid\n> sleep 10 && cat /tmp/pid\n> 36792\n> \n> In my case the truncation works flawlessly (there is no check in\n> elasticsearch if the pid file already exists). Anything you did different\n> with the exception of your operating system? I am wondering if the sigar\n> library is completely supported under freebsd, but judging from the\n> official documentation it looks like it.\n> \n> —\n> Reply to this email directly or view it on GitHubhttps://github.com/elasticsearch/elasticsearch/issues/3425#issuecomment-21989725\n> .\n", "created_at": "2013-08-02T16:29:27Z" }, { "body": "sorry, I got you wrong. Will fix it.\n\nThanks for your patience to report!\n", "created_at": "2013-08-03T14:09:58Z" } ], "number": 3425, "title": "pid file not properly overwritten" }
{ "body": "The current implementation does not overwrite, but only prepend the new PID into the pidfile.\nSo if the process is 4 digits long, but the file is already there with a 5 digit number, the file will contain 5 digits after the write.\n\nNote: If the pidfile still exists this usually means, there either is already an instance running using this pidfile or the process has not finished correctly.\n\nCloses #3425\n", "number": 3437, "review_comments": [], "title": "Overwriting pidfile on startup" }
{ "commits": [ { "message": "Overwriting pidfile on startup\n\nThe current implementation does not overwrite, but only prepend the new PID into the pidfile.\nSo if the process is 4 digits long, but the file is already there with a 5 digit number, the file will contain 5 digits after the write.\n\nNote: If the pidfile still exists this usually means, there either is already an instance running using this pidfile or the process has not finished correctly.\n\nCloses #3425" } ], "files": [ { "diff": "@@ -37,6 +37,7 @@\n import org.elasticsearch.node.internal.InternalSettingsPerparer;\n \n import java.io.File;\n+import java.io.FileOutputStream;\n import java.io.RandomAccessFile;\n import java.util.Locale;\n import java.util.Set;\n@@ -151,9 +152,9 @@ public static void main(String[] args) {\n if (fPidFile.getParentFile() != null) {\n FileSystemUtils.mkdirs(fPidFile.getParentFile());\n }\n- RandomAccessFile rafPidFile = new RandomAccessFile(fPidFile, \"rw\");\n- rafPidFile.writeBytes(Long.toString(JvmInfo.jvmInfo().pid()));\n- rafPidFile.close();\n+ FileOutputStream outputStream = new FileOutputStream(fPidFile);\n+ outputStream.write(Long.toString(JvmInfo.jvmInfo().pid()).getBytes());\n+ outputStream.close();\n \n fPidFile.deleteOnExit();\n } catch (Exception e) {", "filename": "src/main/java/org/elasticsearch/bootstrap/Bootstrap.java", "status": "modified" } ] }
{ "body": "Closing an index right after it has been creating leaves it in an unopenable state. Specifically, opening the index results in a bunch of shards being unassigned and not getting assigned automatically. Reproduction curl commands ready to pase into a shell: https://gist.github.com/nik9000/5970277\n", "comments": [ { "body": "Confirmed, this is indeed the case. There is simple workaround for this issue though - just make sure that the index gets to at least to yellow state before trying to close it. I am curious is there a particular use case that led to this being a problem?\n", "created_at": "2013-07-14T17:18:26Z" }, { "body": "On Sun, Jul 14, 2013 at 1:18 PM, Igor Motov notifications@github.comwrote:\n\n> Confirmed, this is indeed the case. There is simple workaround for this\n> issue though - just make sure that the index gets to at least to yellow\n> state before trying to close it. I am curious is there a particular use\n> case that led to this being a problem?\n\nI was building a script that can build an index from scratch as well as\nupdate it portions of its configuration are out of date and being lazy\nabout how I implemented it figuring that an empty index would be cheap to\nopen and close. After I stopped being lazy and specifying the analysers\nduring index creation my problem went away. It'd probably have been good\nenough to add a note to\nhttp://www.elasticsearch.org/guide/reference/api/admin-indices-open-close/and\nI wouldn't have tried it.\n\nAre there other non-index-creation cases that put the index in this state?\nIf so it might be worth implementing something stops the close action.\n\nNik\n", "created_at": "2013-07-14T19:48:42Z" }, { "body": "@imotov I believe as you mentioned that we can really open an index only after all the primary shards have been allocation at least once. This is because we can't recreate the `primaryAllocatedPostApi` flag (we could potentially, but its not supported now).\n\nI suggest that a simple fix for now is to reject a close index request if one of its index shard routing info has the primaryAllocatedPostApi set to false.\n", "created_at": "2013-07-14T20:04:47Z" }, { "body": "While working on a patch for the issue kimchy mentioned I noticed a look alike issue: https://gist.github.com/nik9000/6028478\n\nI'll post another github issue after some more investigation.\n", "created_at": "2013-07-18T11:07:11Z" }, { "body": "Hi @nik9000. Thanks for the PR. I am just thinking maybe we can remove `assertRed();` and `assert false;` from the test. This way we are still testing fast closing, but even if closing is not fast enough test wouldn't fail. I also feel that 128 shards might be a bit excessive. Maybe reduce it to 50 or even 20?\n\nI looked at the related problem with quorum as well. Not really sure what we should do about it. Should we even allow creation of an index with 3 replicas and `index.recovery.initial_shards=quorum` on a single node? On the other side, even if we have 4 nodes, it's not always obvious if we can fulfill `index.recovery.initial_shards` requirements or not. So, we could store some flag in index metadata that would indicate that this index wasn't fully allocated at least once yet. And if this flag is set, LocalGatewayAllocator would ignore `requiredAllocation` or as in case of prematurely closed index create missing shards as needed. We could even use this flag to block any operations on such index, so it would be really obvious that this index is not in a proper state. 
@kimchy what do you think?\n", "created_at": "2013-07-20T00:13:52Z" }, { "body": "On Fri, Jul 19, 2013 at 8:14 PM, Igor Motov notifications@github.comwrote:\n\n> Hi @nik9000 https://github.com/nik9000. Thanks for the PR.\n\nThanks for taking the time to read it!\n\n> I am just thinking maybe we can remove assertRed(); and assert false;from the test. This way we are still testing fast closing, but even if\n> closing is not fast enough test wouldn't fail. I also feel that 128 shards\n> might be a bit excessive. Maybe reduce it to 50 or even 20?\n> \n> I'm not sure it'd be a good test if sometimes it didn't verify anything.\n> If we were in JUnit I'd say we could use the Assume api but I'm not really\n> sure what the right thing is in TestNG.\n\nAs to the 128 shards it was just a number that seemed to trigger the\nbehavior. IIRC 20 wouldn't have consistently triggered the problem on my\nlaptop. 50 probably would but I didn't want to risk someone having a\nfaster machine than mine and getting an unexpectedly useless/failing test.\n\n> I looked at the related problem with quorum as well. Not really sure what\n> we should do about it. Should we even allow creation of an index with 3\n> replicas and index.recovery.initial_shards=quorum on a single node? On\n> the other side, even if we have 4 nodes, it's not always obvious if we can\n> fulfill index.recovery.initial_shards requirements or not. So, we could\n> store some flag in index metadata that would indicate that this index\n> wasn't fully allocated at least once yet. And if this flag is set,\n> LocalGatewayAllocator would ignore requiredAllocation or as in case of\n> prematurely closed index create missing shards as needed. We could even use\n> this flag to block any operations on such index, so it would be really\n> obvious that this index is not in a proper state. @kimchyhttps://github.com/kimchywhat do you think?\n> \n> For my book stopping people when they ask for a configuration that just\n> isn't going to fully allocate sounds like the right thing to do. It'd\n> probably make sense to have a force flag that gets the unchecked behavior\n> but with a warning that things might not work properly if you don't bring\n> those nodes online.\n\nWhat about the case where when you create the index everything makes sense\nand allocates properly but then you lose a node? Without that node you can\nclose the index but it won't open again until you bring that node back\nonline. At least, that is what I saw when I was playing with\nhttps://github.com/elasticsearch/elasticsearch/issues/3354.\n\n> —\n> Reply to this email directly or view it on GitHubhttps://github.com/elasticsearch/elasticsearch/issues/3313#issuecomment-21284580\n> .\n", "created_at": "2013-07-20T00:51:43Z" }, { "body": "I could work the test so it used a small number of shards and just tried\nagain if it didn't hit the problem. That wouldn't be too tough. I'll have\na look at that sometime in the next few days.\n\nOn Fri, Jul 19, 2013 at 8:51 PM, Nikolas Everett nik9000@gmail.com wrote:\n\n> On Fri, Jul 19, 2013 at 8:14 PM, Igor Motov notifications@github.comwrote:\n> \n> > Hi @nik9000 https://github.com/nik9000. Thanks for the PR.\n> \n> Thanks for taking the time to read it!\n> \n> > I am just thinking maybe we can remove assertRed(); and assert false;from the test. This way we are still testing fast closing, but even if\n> > closing is not fast enough test wouldn't fail. I also feel that 128 shards\n> > might be a bit excessive. 
Maybe reduce it to 50 or even 20?\n> > \n> > I'm not sure it'd be a good test if sometimes it didn't verify anything.\n> > If we were in JUnit I'd say we could use the Assume api but I'm not really\n> > sure what the right thing is in TestNG.\n> \n> As to the 128 shards it was just a number that seemed to trigger the\n> behavior. IIRC 20 wouldn't have consistently triggered the problem on my\n> laptop. 50 probably would but I didn't want to risk someone having a\n> faster machine than mine and getting an unexpectedly useless/failing test.\n> \n> > I looked at the related problem with quorum as well. Not really sure what\n> > we should do about it. Should we even allow creation of an index with 3\n> > replicas and index.recovery.initial_shards=quorum on a single node? On\n> > the other side, even if we have 4 nodes, it's not always obvious if we can\n> > fulfill index.recovery.initial_shards requirements or not. So, we could\n> > store some flag in index metadata that would indicate that this index\n> > wasn't fully allocated at least once yet. And if this flag is set,\n> > LocalGatewayAllocator would ignore requiredAllocation or as in case of\n> > prematurely closed index create missing shards as needed. We could even use\n> > this flag to block any operations on such index, so it would be really\n> > obvious that this index is not in a proper state. @kimchyhttps://github.com/kimchywhat do you think?\n> > \n> > For my book stopping people when they ask for a configuration that just\n> > isn't going to fully allocate sounds like the right thing to do. It'd\n> > probably make sense to have a force flag that gets the unchecked behavior\n> > but with a warning that things might not work properly if you don't bring\n> > those nodes online.\n> \n> What about the case where when you create the index everything makes sense\n> and allocates properly but then you lose a node? Without that node you can\n> close the index but it won't open again until you bring that node back\n> online. At least, that is what I saw when I was playing with\n> https://github.com/elasticsearch/elasticsearch/issues/3354.\n> \n> > —\n> > Reply to this email directly or view it on GitHubhttps://github.com/elasticsearch/elasticsearch/issues/3313#issuecomment-21284580\n> > .\n", "created_at": "2013-07-20T00:52:59Z" }, { "body": "Interesting, I was able to consistently reproduce it with 10 shards (and even 5 in most cases), hence the suggested number. I was thinking about retrying logic as well, but you would still need to remove assertRed() to remove race condition between checking index health and closing the index and then do clean up of index that failed to create the issue. \n", "created_at": "2013-07-20T01:04:01Z" }, { "body": "I've updated the pull request with a retry logic and it looks like I can\nreproduce it with two shards! I suppose I should have tried ratcheting\ndown the number rather than watching my logs. Anyway I feel better with\nthe retry logic making sure the test actually does something but runs more\nquickly if it can get away with it.\n\nNik\n\nOn Fri, Jul 19, 2013 at 9:04 PM, Igor Motov notifications@github.comwrote:\n\n> Interesting, I was able to consistently reproduce it with 10 shards (and\n> even 5 in most cases), hence the suggested number. 
I was thinking about\n> retrying logic as well, but you would still need to remove assertRed() to\n> remove race condition between checking index health and closing the index\n> and then do clean up of index that failed to create the issue.\n> \n> —\n> Reply to this email directly or view it on GitHubhttps://github.com/elasticsearch/elasticsearch/issues/3313#issuecomment-21285609\n> .\n", "created_at": "2013-07-20T18:36:48Z" } ], "number": 3313, "title": "Closing an index right after it has been creating leaves it in an unopenable state" }
{ "body": "Refuse to close indexes that have not had their primary shard shard allocated post api action because this leaves the index in an un-openable state.\n\nCloses #3313\n", "number": 3353, "review_comments": [], "title": "Don't allow unallocated indexes to be closed." }
{ "commits": [ { "message": "Don't allow unallocated indexes to be closed." } ], "files": [ { "diff": "@@ -26,6 +26,8 @@\n import org.elasticsearch.cluster.block.ClusterBlock;\n import org.elasticsearch.cluster.block.ClusterBlockLevel;\n import org.elasticsearch.cluster.block.ClusterBlocks;\n+import org.elasticsearch.cluster.routing.IndexRoutingTable;\n+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;\n import org.elasticsearch.cluster.routing.RoutingTable;\n import org.elasticsearch.cluster.routing.allocation.AllocationService;\n import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;\n@@ -36,6 +38,7 @@\n import org.elasticsearch.common.unit.TimeValue;\n import org.elasticsearch.index.Index;\n import org.elasticsearch.indices.IndexMissingException;\n+import org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException;\n import org.elasticsearch.rest.RestStatus;\n \n import java.util.ArrayList;\n@@ -76,6 +79,14 @@ public ClusterState execute(ClusterState currentState) {\n listener.onFailure(new IndexMissingException(new Index(index)));\n return currentState;\n }\n+ IndexRoutingTable indexRoutingTable = currentState.routingTable().index(index);\n+ for (IndexShardRoutingTable shard: indexRoutingTable) {\n+ if (!shard.primaryAllocatedPostApi()) {\n+ listener.onFailure(new IndexPrimaryShardNotAllocatedException(new Index(index)));\n+ return currentState;\n+ }\n+ }\n+\n if (indexMetaData.state() != IndexMetaData.State.CLOSE) {\n indicesToClose.add(index);\n }", "filename": "src/main/java/org/elasticsearch/cluster/metadata/MetaDataStateIndexService.java", "status": "modified" }, { "diff": "@@ -0,0 +1,40 @@\n+/*\n+ * Licensed to ElasticSearch and Shay Banon under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. 
See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+\n+package org.elasticsearch.indices;\n+\n+import org.elasticsearch.index.Index;\n+import org.elasticsearch.index.IndexException;\n+import org.elasticsearch.rest.RestStatus;\n+\n+/**\n+ * Thrown when some action cannot be performed because the primary shard of\n+ * some shard group in an index has not been allocated post api action.\n+ */\n+public class IndexPrimaryShardNotAllocatedException extends IndexException {\n+\n+ public IndexPrimaryShardNotAllocatedException(Index index) {\n+ super(index, \"primary not allocated post api\");\n+ }\n+\n+ @Override\n+ public RestStatus status() {\n+ return RestStatus.CONFLICT;\n+ }\n+}", "filename": "src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java", "status": "added" }, { "diff": "@@ -20,7 +20,9 @@\n package org.elasticsearch.test.integration.indices.settings;\n \n import org.elasticsearch.ElasticSearchIllegalArgumentException;\n+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;\n import org.elasticsearch.cluster.metadata.IndexMetaData;\n+import org.elasticsearch.common.Priority;\n import org.elasticsearch.common.settings.ImmutableSettings;\n import org.elasticsearch.test.integration.AbstractSharedClusterTest;\n import org.testng.annotations.Test;\n@@ -63,6 +65,10 @@ public void testOpenCloseUpdateSettings() throws Exception {\n \n // now close the index, change the non dynamic setting, and see that it applies\n \n+ // Wait for the index to turn green before attempting to close it\n+ ClusterHealthResponse health = client().admin().cluster().prepareHealth().setTimeout(\"30s\").setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();\n+ assertThat(health.isTimedOut(), equalTo(false));\n+\n client().admin().indices().prepareClose(\"test\").execute().actionGet();\n \n client().admin().indices().prepareUpdateSettings(\"test\")", "filename": "src/test/java/org/elasticsearch/test/integration/indices/settings/UpdateSettingsTests.java", "status": "modified" }, { "diff": "@@ -20,9 +20,10 @@\n package org.elasticsearch.test.integration.indices.state;\n \n import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;\n+import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;\n import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;\n+import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;\n import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;\n-import org.elasticsearch.action.admin.indices.status.IndicesStatusResponse;\n import org.elasticsearch.cluster.block.ClusterBlockException;\n import org.elasticsearch.cluster.metadata.IndexMetaData;\n import org.elasticsearch.cluster.routing.ShardRoutingState;\n@@ -31,10 +32,10 @@\n import org.elasticsearch.common.logging.Loggers;\n import org.elasticsearch.common.settings.SettingsException;\n import org.elasticsearch.indices.IndexMissingException;\n+import org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException;\n import org.elasticsearch.test.integration.AbstractNodesTests;\n import org.testng.annotations.AfterMethod;\n import org.testng.annotations.Test;\n-\n import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;\n import static org.hamcrest.MatcherAssert.assertThat;\n import static org.hamcrest.Matchers.equalTo;\n@@ -61,50 +62,70 @@ public void testSimpleOpenClose() {\n logger.info(\"--> creating 
test index\");\n client(\"node1\").admin().indices().prepareCreate(\"test\").execute().actionGet();\n \n- logger.info(\"--> waiting for green status\");\n- ClusterHealthResponse health = client(\"node1\").admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes(\"2\").execute().actionGet();\n- assertThat(health.isTimedOut(), equalTo(false));\n-\n- ClusterStateResponse stateResponse = client(\"node1\").admin().cluster().prepareState().execute().actionGet();\n- assertThat(stateResponse.getState().metaData().index(\"test\").state(), equalTo(IndexMetaData.State.OPEN));\n- assertThat(stateResponse.getState().routingTable().index(\"test\").shards().size(), equalTo(5));\n- assertThat(stateResponse.getState().routingTable().index(\"test\").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10));\n-\n- logger.info(\"--> indexing a simple document\");\n- client(\"node1\").prepareIndex(\"test\", \"type1\", \"1\").setSource(\"field1\", \"value1\").execute().actionGet();\n+ waitForGreen();\n+ assertOpen(5, 10);\n \n- logger.info(\"--> closing test index...\");\n- client(\"node1\").admin().indices().prepareClose(\"test\").execute().actionGet();\n+ closeIndex();\n+ assertClosed();\n \n- stateResponse = client(\"node1\").admin().cluster().prepareState().execute().actionGet();\n- assertThat(stateResponse.getState().metaData().index(\"test\").state(), equalTo(IndexMetaData.State.CLOSE));\n- assertThat(stateResponse.getState().routingTable().index(\"test\"), nullValue());\n+ openIndex();\n+ waitForGreen();\n+ assertOpen(5, 10);\n+ }\n \n- logger.info(\"--> testing indices status api...\");\n- IndicesStatusResponse indicesStatusResponse = client(\"node1\").admin().indices().prepareStatus().execute().actionGet();\n+ /**\n+ * Verify that attempts to close an index right after it is created are rejected.\n+ * Also verifies that the index can later be closed and properly reopened.\n+ * Since this test relies on things taking time, it might fail spuriously. 
Sorry.\n+ */\n+ @Test\n+ public void testFastCloseAfterCreateDoesNotClose() {\n+ logger.info(\"--> starting two nodes....\");\n+ startNode(\"node1\");\n+ startNode(\"node2\");\n \n- logger.info(\"--> trying to index into a closed index ...\");\n- try {\n- client(\"node1\").prepareIndex(\"test\", \"type1\", \"1\").setSource(\"field1\", \"value1\").execute().actionGet();\n- assert false;\n- } catch (ClusterBlockException e) {\n- // all is well\n+ int shards = 2;\n+ while (true) {\n+ logger.info(\"--> creating test index with {} shards\", shards);\n+ client(\"node1\").admin().indices().prepareCreate(\"test\").setSettings(\n+ \"index.number_of_shards\", shards, \"index.number_of_replicas\", 1).execute().actionGet();\n+\n+ logger.info(\"--> triggering a fast close\");\n+ boolean caughtFastClose = true;\n+ try {\n+ closeIndex();\n+ caughtFastClose = false;\n+ } catch(IndexPrimaryShardNotAllocatedException e) {\n+ //expected\n+ }\n+ logger.info(\"--> making sure the fast close occured in the expected state: cluster status = red\");\n+ if (getStatus() != ClusterHealthStatus.RED) {\n+ caughtFastClose = false;\n+ }\n+\n+ if (caughtFastClose) {\n+ logger.info(\"--> caught a fast close with {} shards\", shards);\n+ break;\n+ } else {\n+ logger.info(\"--> didn't get a fast close with {} shards so trying more\", shards);\n+ assertThat(\"We run out of attempts to catch a fast close.\", shards <= 1024);\n+ waitForGreen();\n+ client(\"node1\").admin().indices().prepareDelete(\"test\").execute();\n+ ClusterHealthResponse health = client(\"node1\").admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForActiveShards(0).execute().actionGet();\n+ assertThat(health.isTimedOut(), equalTo(false));\n+ shards *= 2;\n+ }\n }\n \n- logger.info(\"--> opening index...\");\n- client(\"node1\").admin().indices().prepareOpen(\"test\").execute().actionGet();\n+ waitForGreen();\n+ assertOpen(shards, shards * 2);\n \n- logger.info(\"--> waiting for green status\");\n- health = client(\"node1\").admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes(\"2\").execute().actionGet();\n- assertThat(health.isTimedOut(), equalTo(false));\n+ closeIndex();\n+ assertClosed();\n \n- stateResponse = client(\"node1\").admin().cluster().prepareState().execute().actionGet();\n- assertThat(stateResponse.getState().metaData().index(\"test\").state(), equalTo(IndexMetaData.State.OPEN));\n- assertThat(stateResponse.getState().routingTable().index(\"test\").shards().size(), equalTo(5));\n- assertThat(stateResponse.getState().routingTable().index(\"test\").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10));\n-\n- logger.info(\"--> indexing a simple document\");\n- client(\"node1\").prepareIndex(\"test\", \"type1\", \"1\").setSource(\"field1\", \"value1\").execute().actionGet();\n+ openIndex();\n+ waitForGreen();\n+ assertOpen(shards, shards * 2);\n }\n \n @Test\n@@ -132,4 +153,49 @@ public void testConsistencyAfterIndexCreationFailure() {\n assertThat(response.isAcknowledged(), equalTo(true));\n }\n \n+ private void waitForGreen() {\n+ logger.info(\"--> waiting for green status\");\n+ ClusterHealthResponse health = client(\"node1\").admin().cluster().prepareHealth().setTimeout(\"30s\").setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes(\"2\").execute().actionGet();\n+ assertThat(health.isTimedOut(), equalTo(false));\n+ }\n+\n+ private ClusterHealthStatus getStatus() {\n+ logger.info(\"--> should still be red\");\n+ 
ClusterHealthResponse health = client(\"node1\").admin().cluster().prepareHealth().execute().actionGet();\n+ return health.getStatus();\n+ }\n+\n+ private void openIndex() {\n+ logger.info(\"--> opening index...\");\n+ client(\"node1\").admin().indices().prepareOpen(\"test\").execute().actionGet();\n+ }\n+\n+ private void assertOpen(int expectedPrimaryShards, int expectedTotalShards) {\n+ ClusterStateResponse stateResponse = client(\"node1\").admin().cluster().prepareState().execute().actionGet();\n+ assertThat(stateResponse.getState().metaData().index(\"test\").state(), equalTo(IndexMetaData.State.OPEN));\n+ assertThat(stateResponse.getState().routingTable().index(\"test\").shards().size(), equalTo(expectedPrimaryShards));\n+ assertThat(stateResponse.getState().routingTable().index(\"test\").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(expectedTotalShards));\n+ logger.info(\"--> indexing a simple document\");\n+ client(\"node1\").prepareIndex(\"test\", \"type1\", \"1\").setSource(\"field1\", \"value1\").execute().actionGet();\n+ }\n+\n+ private void closeIndex() {\n+ logger.info(\"--> closing test index...\");\n+ CloseIndexResponse closeResponse = client(\"node1\").admin().indices().prepareClose(\"test\").execute().actionGet();\n+ assertThat(closeResponse.isAcknowledged(), equalTo(true));\n+ }\n+\n+ private void assertClosed() {\n+ ClusterStateResponse stateResponse = client(\"node1\").admin().cluster().prepareState().execute().actionGet();\n+ assertThat(stateResponse.getState().metaData().index(\"test\").state(), equalTo(IndexMetaData.State.CLOSE));\n+ assertThat(stateResponse.getState().routingTable().index(\"test\"), nullValue());\n+\n+ logger.info(\"--> trying to index into a closed index ...\");\n+ try {\n+ client(\"node1\").prepareIndex(\"test\", \"type1\", \"1\").setSource(\"field1\", \"value1\").execute().actionGet();\n+ assert false;\n+ } catch (ClusterBlockException e) {\n+ // all is well\n+ }\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/integration/indices/state/SimpleIndexStateTests.java", "status": "modified" } ] }
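Until a build with the rejection above is available, the workaround mentioned in the discussion is to wait for the index to reach at least yellow health (all primaries allocated) before closing it. A minimal curl sketch of that sequence; the index name and timeout are illustrative.

```sh
# Create the index, wait until the primaries are allocated (yellow), then close it.
curl -XPUT 'localhost:9200/test-index'
curl -XGET 'localhost:9200/_cluster/health/test-index?wait_for_status=yellow&timeout=30s'
curl -XPOST 'localhost:9200/test-index/_close'
```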
{ "body": "", "comments": [ { "body": "Merged request was pulled in. Thanks!\n", "created_at": "2013-07-18T10:06:12Z" } ], "number": 3342, "title": "Thai language analyzer ignores stopwords configuration setting" }
{ "body": "Modified ThaiAnalyzerProvider so it is now possible to set stopwords for the thai analyzer in index settings.\n\nIssue: #3342\n", "number": 3343, "review_comments": [], "title": "Analysis: update ThaiAnalyzerProvider to use custom stopwords setting" }
{ "commits": [ { "message": "Analysis: update ThaiAnalyzerProvider to use custom stopwords setting" } ], "files": [ { "diff": "@@ -20,9 +20,11 @@\n package org.elasticsearch.index.analysis;\n \n import org.apache.lucene.analysis.th.ThaiAnalyzer;\n+import org.apache.lucene.analysis.util.CharArraySet;\n import org.elasticsearch.common.inject.Inject;\n import org.elasticsearch.common.inject.assistedinject.Assisted;\n import org.elasticsearch.common.settings.Settings;\n+import org.elasticsearch.env.Environment;\n import org.elasticsearch.index.Index;\n import org.elasticsearch.index.settings.IndexSettings;\n \n@@ -34,9 +36,9 @@ public class ThaiAnalyzerProvider extends AbstractIndexAnalyzerProvider<ThaiAnal\n private final ThaiAnalyzer analyzer;\n \n @Inject\n- public ThaiAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {\n+ public ThaiAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {\n super(index, indexSettings, name, settings);\n- analyzer = new ThaiAnalyzer(version);\n+\tanalyzer = new ThaiAnalyzer(version, Analysis.parseStopWords(env, settings, ThaiAnalyzer.getDefaultStopSet(), version));\n }\n \n @Override", "filename": "src/main/java/org/elasticsearch/index/analysis/ThaiAnalyzerProvider.java", "status": "modified" } ] }
{ "body": "I have a geojson file generated using ogr2ogr from a shape file.Unfortunately some of the geometry objects are null but I still want to index the other meta data. Currently I get a parse error on these documents because the geo_shape parser is not handling json nulls. I think it would be nicer to not fail and simply not index the geo_shape field for those fields. Would it be possible to fix this?\n\nHere's a sample document. It fails on the geometry field (which is mapped to geo_shape). The error I get is: MapperParsingException[failed to parse [geometry]]; nested: ElasticSearchParseException[Shape must be an object consisting of type and coordinates];\n\n{ \"type\": \"Feature\", \"properties\": { \"name\": \"吉井町宮田\", \"qs_id\": 856730, \"gn_id\": null, \"woe_id\": 28484701, \"gn_id_eh\": null, \"woe_id_eh\": null, \"gn_name\": null, \"gn_ascii\": null, \"gn_country\": null, \"gn_admin1\": null, \"gn_admin2\": null, \"gn_pop\": null, \"gn_fclass\": null, \"gn_fcode\": null, \"woe_name\": \"吉井町宮田\", \"woe_nameen\": null, \"placetype\": \"LocalAdmin\", \"iso\": \"JP\", \"language\": \"JPN\", \"parent_id\": 28379393, \"woe_local\": 28379393, \"woe_lau\": 28484701, \"woe_adm2\": 0, \"woe_adm1\": 58646425, \"woe_adm0\": 23424856, \"name_local\": \"うきは市\", \"name_lau\": \"吉井町宮田\", \"name_adm2\": null, \"name_adm1\": \"福岡県\", \"name_adm0\": \"日本\", \"gns_id\": null, \"accuracy\": null, \"matchtype\": null, \"geom_qual\": null, \"woe_funk\": null, \"photos\": null, \"photos_all\": null, \"woemembers\": null, \"photos_1k\": null, \"photos_9k\": null, \"photos_sr\": 0, \"photos_9r\": 0, \"pop_sr\": 0 }, \"geometry\": null }\n", "comments": [ { "body": "Hi @jillesvangurp, setting shapes to `null` is part of the geo-refactoring in 1.0 inspired by #2708. Nevertheless the current version also throws a parsing exception. I'll fix this for 1.0 as soon as possible.\n", "created_at": "2013-07-15T14:50:58Z" } ], "number": 3310, "title": "geo_shape null geometry" }
{ "body": "The current shape builders allow parsing `null` shapes but if these values get indexed a parsing exception (nullpointer) is thrown. This commit catches the actual `null` shape and ignored any field creation.\n\nCloses #3310\n", "number": 3332, "review_comments": [], "title": "Fixed nullshape indexing" }
{ "commits": [ { "message": "Fixed nullshape indexing.\nCloses #3310" } ], "files": [ { "diff": "@@ -213,6 +213,9 @@ public FieldDataType defaultFieldDataType() {\n public void parse(ParseContext context) throws IOException {\n try {\n ShapeBuilder shape = ShapeBuilder.parse(context.parser());\n+ if(shape == null) {\n+ return;\n+ }\n Field[] fields = defaultStrategy.createIndexableFields(shape.build());\n if (fields == null || fields.length == 0) {\n return;", "filename": "src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java", "status": "modified" }, { "diff": "@@ -21,18 +21,16 @@\n \n import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n import static org.elasticsearch.index.query.FilterBuilders.geoIntersectionFilter;\n-import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;\n-import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery;\n-import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;\n+import static org.elasticsearch.index.query.QueryBuilders.*;\n import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;\n import static org.hamcrest.MatcherAssert.assertThat;\n-import static org.hamcrest.Matchers.equalTo;\n-import static org.hamcrest.Matchers.instanceOf;\n+import static org.hamcrest.Matchers.*;\n \n import java.io.IOException;\n import java.util.List;\n import java.util.Map;\n \n+import org.elasticsearch.action.get.GetResponse;\n import org.elasticsearch.action.search.SearchResponse;\n import org.elasticsearch.common.geo.builders.ShapeBuilder;\n import org.elasticsearch.common.xcontent.XContentBuilder;\n@@ -43,6 +41,21 @@\n \n public class GeoShapeIntegrationTests extends AbstractSharedClusterTest {\n \n+ @Test\n+ public void testNullShape() throws Exception {\n+ String mapping = XContentFactory.jsonBuilder().startObject().startObject(\"type1\")\n+ .startObject(\"properties\").startObject(\"location\")\n+ .field(\"type\", \"geo_shape\")\n+ .endObject().endObject()\n+ .endObject().endObject().string();\n+ prepareCreate(\"test\").addMapping(\"type1\", mapping).execute().actionGet();\n+ ensureGreen();\n+ \n+ client().prepareIndex(\"test\", \"type1\", \"aNullshape\").setSource(\"{\\\"location\\\": null}\").execute().actionGet();\n+ GetResponse result = client().prepareGet(\"test\", \"type1\", \"aNullshape\").execute().actionGet();\n+ assertThat(result.getField(\"location\"), nullValue());\n+ }\n+ \n @Test\n public void testIndexPointsFilterRectangle() throws Exception {\n String mapping = XContentFactory.jsonBuilder().startObject().startObject(\"type1\")", "filename": "src/test/java/org/elasticsearch/test/integration/search/geo/GeoShapeIntegrationTests.java", "status": "modified" } ] }
{ "body": "When a query requests both sorting and pagination, the expected behavior is that sorting is executed _before_ pagination. And indeed this is the actual behavior when the query requests sorting on a field level. However, when doing script based sorting, it seems that sorting is only executed _after_ pagination. This is behavior is unexpected and especially surprising because the same sort may yield different results depending on whether it is specified on a field level or via a script.\n\nExample\n\nSuppose we have 100 files indexed, named \"file001\", \"file002\", ..., \"file100\", with the file name being stored in a \"filename\" field. Then the following query\n\n```\n{\n \"from\" : 0,\n \"size\" : 10,\n \"query\" : {\n \"match_all\" : { }\n },\n \"sort\" : [ {\n \"filename\" : {\n \"order\" : \"asc\"\n }\n } ]\n}\n```\n\nyields the expected ordering \"file001\", \"file002\", ..., \"file010\". The seemingly equivalent query\n\n```\n{\n \"from\" : 0,\n \"size\" : 10,\n \"query\" : {\n \"match_all\" : { }\n },\n \"sort\" : [ {\n \"_script\" : {\n \"script\" : \"doc['filename'].value\",\n \"type\" : \"string\"\n }\n } ]\n}\n```\n\nmay return, for example, \"file005\", \"file007\", \"file020\", \"file027\", \"file035\", \"file050\", \"file067\", \"file080\", \"file092\", \"file097\". Which is pretty useless and certainly not what the client would expect.\n\nThis behavior was observed in ElasticSearch 0.90.2, but I didn't test previous versions.\n", "comments": [ { "body": "script sorting doesn't sort after pagination, can you provide a full curl recreation so we can check it out?\n", "created_at": "2013-07-11T21:51:05Z" }, { "body": "It looks like a bug that was introduced between 0.90.0RC2 and 0.90.0. Here is a [repro](https://gist.github.com/imotov/5980913). I didn't have a chance to dig into it yet but it looks like it got broken after this [commit](https://github.com/elasticsearch/elasticsearch/commit/f372f7c109b550c6b20b8196713aa313ad6c249f).\n", "created_at": "2013-07-12T02:41:24Z" } ], "number": 3309, "title": "Script based sorting is applied only after pagination" }
{ "body": "The actual documents value was never calculated if setSpare wasn't called\nbefore compareBottom was called on a certain document.\n\nCloses #3309\n", "number": 3331, "review_comments": [], "title": "Set spare becore comparing comparator bottom value" }
{ "commits": [ { "message": "Set spare becore comparing comparator bottom value\n\nThe actual documents value was never calculated if setSpare wasn't called\nbefore compareBottom was called on a certain document.\n\nCloses #3309" } ], "files": [ { "diff": "@@ -59,11 +59,11 @@ public SortField.Type reducedType() {\n \n private final SearchScript script;\n \n- private BytesRef[] values;\n+ private BytesRef[] values; // TODO maybe we can preallocate or use a sentinel to prevent the conditionals in compare\n \n private BytesRef bottom;\n \n- private BytesRef spare = new BytesRef();\n+ private final BytesRef spare = new BytesRef();\n \n private int spareDoc = -1;\n \n@@ -102,10 +102,10 @@ public int compare(int slot1, int slot2) {\n \n @Override\n public int compareBottom(int doc) {\n- \n if (bottom == null) {\n return -1;\n }\n+ setSpare(doc);\n return bottom.compareTo(spare);\n }\n \n@@ -120,7 +120,6 @@ private void setSpare(int doc) {\n if (spareDoc == doc) {\n return;\n }\n- \n script.setNextDocId(doc);\n spare.copyChars(script.run().toString());\n spareDoc = doc;", "filename": "src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/StringScriptDataComparator.java", "status": "modified" }, { "diff": "@@ -35,6 +35,8 @@\n import static org.hamcrest.Matchers.not;\n import static org.hamcrest.Matchers.nullValue;\n \n+import org.elasticsearch.search.sort.ScriptSortBuilder;\n+\n \n import java.io.IOException;\n import java.util.ArrayList;\n@@ -335,6 +337,37 @@ private void testSimpleSorts(int numberOfShards) throws Exception {\n }\n \n assertThat(searchResponse.toString(), not(containsString(\"error\")));\n+ \n+ \n+ // STRING script\n+ size = 1 + random.nextInt(10);\n+\n+ searchResponse = client().prepareSearch()\n+ .setQuery(matchAllQuery())\n+ .setSize(size)\n+ .addSort(new ScriptSortBuilder(\"doc['str_value'].value\", \"string\"))\n+ .execute().actionGet();\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));\n+ assertThat(searchResponse.getHits().hits().length, equalTo(size));\n+ for (int i = 0; i < size; i++) {\n+ assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));\n+ assertThat(searchResponse.getHits().getAt(i).sortValues()[0].toString(), equalTo(new String(new char[]{(char) (97 + i), (char) (97 + i)})));\n+ }\n+ size = 1 + random.nextInt(10);\n+ searchResponse = client().prepareSearch()\n+ .setQuery(matchAllQuery())\n+ .setSize(size)\n+ .addSort(\"str_value\", SortOrder.DESC)\n+ .execute().actionGet();\n+\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));\n+ assertThat(searchResponse.getHits().hits().length, equalTo(size));\n+ for (int i = 0; i < size; i++) {\n+ assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));\n+ assertThat(searchResponse.getHits().getAt(i).sortValues()[0].toString(), equalTo(new String(new char[]{(char) (97 + (9 - i)), (char) (97 + (9 - i))})));\n+ }\n+\n+ assertThat(searchResponse.toString(), not(containsString(\"error\")));\n \n // BYTE\n size = 1 + random.nextInt(10);", "filename": "src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java", "status": "modified" } ] }
{ "body": "When setting a timestamp field as not indexed, the value is not saved correctly in the cluster state.\n\nUsing the following index template:\n\n```\n{\n \"foo_template\": {\n \"template\": \"foo-*\",\n \"settings\": {\n \"index.number_of_shards\": 1,\n \"index.number_of_replicas\": 0\n },\n \"mappings\": {\n \"type1\": {\n \"_timestamp\" : { \"enabled\" : true, \"index\": \"no\", \"store\": \"yes\"},\n \"properties\": {\n \"test\": {\"type\" : \"long\", \"index\" : \"no\"},\n \"baz\": {\"type\" : \"boolean\", \"index\" : \"no\"}\n }\n }\n }\n }\n}\n```\n\nCreating a new index (foo-test) with that template works correctly. The mapping returned is as follows:\n\n```\n{\n \"foo-test\": {\n \"type1\": {\n \"_timestamp\": {\n \"enabled\": true,\n \"index\": false,\n \"store\": true\n },\n \"properties\": {\n \"baz\": {\n \"type\": \"boolean\",\n \"index\": \"no\"\n },\n \"test\": {\n \"type\": \"long\",\n \"index\": \"no\"\n }\n }\n }\n }\n}\n```\n\nNotice that the values for the timestamp field now uses boolean values instead of yes/no. \nWhenever the cluster state is recovered, the boolean values are still used, causing an exception\n\n```\n[2013-06-12 16:34:43,481][INFO ][gateway ] [searchnode] recovered [1] indices into cluster_state\n[2013-06-12 16:34:43,482][DEBUG][indices.cluster ] [searchnode] [foo-test] adding mapping [type1], source [{\"type1\":{\"_timestamp\":{\"enabled\":true,\"index\":false,\"store\":true},\"properties\":{\"baz\":{\"type\":\"boolean\",\"index\":\"no\"},\"test\":{\"type\":\"long\",\"index\":\"no\"}}}}]\n[2013-06-12 16:34:43,482][WARN ][indices.cluster ] [searchnode] [foo-test] failed to add mapping [type1], source [{\"type1\":{\"_timestamp\":{\"enabled\":true,\"index\":false,\"store\":true},\"properties\":{\"baz\":{\"type\":\"boolean\",\"index\":\"no\"},\"test\":{\"type\":\"long\",\"index\":\"no\"}}}}]\norg.elasticsearch.index.mapper.MapperParsingException: Wrong value for index [false] for field [_timestamp]\n```\n\nThe index setting on the timestamp field works correctly on 0.20.0RC1. The biggest change is the toXContent method. The build sets the fields as a boolean: builder.field(\"index\", fieldType.indexed()), which works on other field mappers.\n\nI have not ran the test in a debugger yet, but will do so shortly.\n\nThe timestamp field in 0.20.0RC1\n\n```\n_timestamp: {\n enabled: true\n index: no\n store: yes\n}\n```\n", "comments": [ { "body": "The fix should be setting the field as:\nbuilder.field(\"index\", indexTokenizeOptionToString(fieldType.indexed(), fieldType.tokenized()));\n\nCan submit a pull request, but it's a one line change. :) Will do so anyways later on. This issue probably affects the RoutingFieldMapper as well.\n", "created_at": "2013-06-13T00:09:57Z" }, { "body": " a PR would be awesome maybe including a testcase?\n", "created_at": "2013-06-13T08:27:33Z" }, { "body": "This issue is one of those times when the test case will be much longer than the fix.:)\n\nI will attempt do have a PR shortly. This feature is not essential in my system and I am in the middle of a Lucene 4.3/elasticsearch 0.90 upgrade. However, if 0.90.2 will be released soon, I will work on it first. Any word on its release?\n", "created_at": "2013-06-13T16:21:48Z" }, { "body": "I finally had time to fix this issue today (day after a holiday is slow around here), and you already fixed it. The fix is easy, but will the current tests simulate the serialization/deserialization of the index settings? 
Most tests skip this step.\n", "created_at": "2013-07-05T18:23:48Z" }, { "body": "The routing field also needs to be fixed:\nhttps://github.com/brusic/elasticsearch/commit/a660fcc53c97f1b47c8dcde08c8edb6dbf4d9e34\n\n(I accidentally created a git branch off another branch, if not I would submit a pull request)\n", "created_at": "2013-07-05T19:16:00Z" } ], "number": 3174, "title": "Timestamp index settings incorrectly stored" }
{ "body": "The index field was serialized as a boolean instead of showing the\n'analyed', 'not_analzyed', 'no' options. Fixed by calling\nindexTokenizeOptionToString() in the builder.\n\nCloses #3174\n", "number": 3294, "review_comments": [], "title": "Fix xcontent serialization of timestamp index field" }
{ "commits": [ { "message": "Fix xcontent serialization of timestamp/routing index field\n\nThe index field was serialized as a boolean instead of showing the\n'analyed', 'not_analzyed', 'no' options. Fixed by calling\nindexTokenizeOptionToString() in the builder.\n\nCloses #3174" } ], "files": [ { "diff": "@@ -238,7 +238,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws\n }\n builder.startObject(CONTENT_TYPE);\n if (fieldType.indexed() != Defaults.FIELD_TYPE.indexed()) {\n- builder.field(\"index\", fieldType.indexed());\n+ builder.field(\"index\", indexTokenizeOptionToString(fieldType.indexed(), fieldType.tokenized()));\n }\n if (fieldType.stored() != Defaults.FIELD_TYPE.stored()) {\n builder.field(\"store\", fieldType.stored());", "filename": "src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java", "status": "modified" }, { "diff": "@@ -226,7 +226,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws\n }\n if (enabledState.enabled) {\n if (fieldType.indexed() != Defaults.FIELD_TYPE.indexed()) {\n- builder.field(\"index\", fieldType.indexed());\n+ builder.field(\"index\", indexTokenizeOptionToString(fieldType.indexed(), fieldType.tokenized()));\n }\n if (fieldType.stored() != Defaults.FIELD_TYPE.stored()) {\n builder.field(\"store\", fieldType.stored());", "filename": "src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java", "status": "modified" }, { "diff": "@@ -19,15 +19,19 @@\n \n package org.elasticsearch.test.unit.index.mapper.routing;\n \n+import org.elasticsearch.common.xcontent.XContentBuilder;\n import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.common.xcontent.json.JsonXContent;\n import org.elasticsearch.index.mapper.DocumentMapper;\n import org.elasticsearch.index.mapper.ParsedDocument;\n import org.elasticsearch.index.mapper.SourceToParse;\n import org.elasticsearch.test.unit.index.mapper.MapperTests;\n import org.testng.annotations.Test;\n \n+import java.util.Map;\n+\n import static org.hamcrest.MatcherAssert.assertThat;\n-import static org.hamcrest.Matchers.equalTo;\n+import static org.hamcrest.Matchers.*;\n \n /**\n *\n@@ -49,4 +53,39 @@ public void simpleRoutingMapperTests() throws Exception {\n assertThat(doc.rootDoc().get(\"_routing\"), equalTo(\"routing_value\"));\n assertThat(doc.rootDoc().get(\"field\"), equalTo(\"value\"));\n }\n+\n+ @Test\n+ public void testSetValues() throws Exception {\n+ String mapping = XContentFactory.jsonBuilder().startObject().startObject(\"type\")\n+ .startObject(\"_routing\")\n+ .field(\"store\", \"no\")\n+ .field(\"index\", \"no\")\n+ .field(\"path\", \"route\")\n+ .endObject()\n+ .endObject().endObject().string();\n+ DocumentMapper docMapper = MapperTests.newParser().parse(mapping);\n+ assertThat(docMapper.routingFieldMapper().fieldType().stored(), equalTo(false));\n+ assertThat(docMapper.routingFieldMapper().fieldType().indexed(), equalTo(false));\n+ assertThat(docMapper.routingFieldMapper().path(), equalTo(\"route\"));\n+ }\n+\n+ @Test\n+ public void testThatSerializationWorksCorrectlyForIndexField() throws Exception {\n+ String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject(\"type\")\n+ .startObject(\"_routing\").field(\"store\", \"no\").field(\"index\", \"no\").endObject()\n+ .endObject().endObject().string();\n+ DocumentMapper enabledMapper = MapperTests.newParser().parse(enabledMapping);\n+\n+ XContentBuilder builder = 
JsonXContent.contentBuilder().startObject();\n+ enabledMapper.routingFieldMapper().toXContent(builder, null).endObject();\n+ builder.close();\n+ Map<String, Object> serializedMap = JsonXContent.jsonXContent.createParser(builder.bytes()).mapAndClose();\n+ assertThat(serializedMap, hasKey(\"_routing\"));\n+ assertThat(serializedMap.get(\"_routing\"), instanceOf(Map.class));\n+ Map<String, Object> routingConfiguration = (Map<String, Object>) serializedMap.get(\"_routing\");\n+ assertThat(routingConfiguration, hasKey(\"store\"));\n+ assertThat(routingConfiguration.get(\"store\").toString(), is(\"false\"));\n+ assertThat(routingConfiguration, hasKey(\"index\"));\n+ assertThat(routingConfiguration.get(\"index\").toString(), is(\"no\"));\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/unit/index/mapper/routing/RoutingTypeMapperTests.java", "status": "modified" }, { "diff": "@@ -20,10 +20,12 @@\n package org.elasticsearch.test.unit.index.mapper.timestamp;\n \n import java.util.Locale;\n+import java.util.Map;\n \n import org.elasticsearch.common.bytes.BytesReference;\n import org.elasticsearch.common.xcontent.XContentBuilder;\n import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.common.xcontent.json.JsonXContent;\n import org.elasticsearch.index.mapper.DocumentMapper;\n import org.elasticsearch.index.mapper.ParsedDocument;\n import org.elasticsearch.index.mapper.SourceToParse;\n@@ -32,9 +34,7 @@\n import org.testng.annotations.Test;\n \n import static org.hamcrest.MatcherAssert.assertThat;\n-import static org.hamcrest.Matchers.equalTo;\n-import static org.hamcrest.Matchers.is;\n-import static org.hamcrest.Matchers.notNullValue;\n+import static org.hamcrest.Matchers.*;\n \n /**\n */\n@@ -132,4 +132,24 @@ public void testThatDisablingFieldMapperDoesNotReturnAnyUselessInfo() throws Exc\n \n assertThat(builder.string(), is(String.format(Locale.ROOT, \"{\\\"%s\\\":{}}\", TimestampFieldMapper.NAME)));\n }\n+\n+ @Test // issue 3174\n+ public void testThatSerializationWorksCorrectlyForIndexField() throws Exception {\n+ String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject(\"type\")\n+ .startObject(\"_timestamp\").field(\"enabled\", true).field(\"store\", \"yes\").field(\"index\", \"no\").endObject()\n+ .endObject().endObject().string();\n+ DocumentMapper enabledMapper = MapperTests.newParser().parse(enabledMapping);\n+\n+ XContentBuilder builder = JsonXContent.contentBuilder().startObject();\n+ enabledMapper.timestampFieldMapper().toXContent(builder, null).endObject();\n+ builder.close();\n+ Map<String, Object> serializedMap = JsonXContent.jsonXContent.createParser(builder.bytes()).mapAndClose();\n+ assertThat(serializedMap, hasKey(\"_timestamp\"));\n+ assertThat(serializedMap.get(\"_timestamp\"), instanceOf(Map.class));\n+ Map<String, Object> timestampConfiguration = (Map<String, Object>) serializedMap.get(\"_timestamp\");\n+ assertThat(timestampConfiguration, hasKey(\"store\"));\n+ assertThat(timestampConfiguration.get(\"store\").toString(), is(\"true\"));\n+ assertThat(timestampConfiguration, hasKey(\"index\"));\n+ assertThat(timestampConfiguration.get(\"index\").toString(), is(\"no\"));\n+ }\n }\n\\ No newline at end of file", "filename": "src/test/java/org/elasticsearch/test/unit/index/mapper/timestamp/TimestampMappingTests.java", "status": "modified" } ] }
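The fix makes `_timestamp` (and `_routing`) serialize their `index` option back as the usual string values instead of a boolean, so mappings recovered from the cluster state parse again. A quick curl check, with an illustrative index name.

```sh
curl -XPUT 'localhost:9200/ts-test' -d '{
  "mappings": {
    "type1": {
      "_timestamp": { "enabled": true, "index": "no", "store": "yes" }
    }
  }
}'
# Before the fix the stored mapping came back with "index": false, which fails to
# re-parse on recovery; after the fix it comes back as "index": "no".
curl -XGET 'localhost:9200/ts-test/type1/_mapping?pretty'
```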
{ "body": "According to the documentation:\n\nNote: In order to use the mlt feature a mlt_field needs to be either be stored, store term_vector or source needs to be enabled.\n\nBut,running this:\n\n```\ncurl -XPOST http://localhost:9200/foo\ncurl -XPUT http://localhost:9200/foo/bar/_mapping -d '{ \"bar\": { \"dynamic\": \"strict\", \"properties\": { \"id\": { \"type\": \"integer\", \"index\": \"not_analyzed\" }, \"content\": { \"type\": \"string\", \"analyzer\": \"standard\" }}}}'\n\ncurl -XPUT http://localhost:9200/foo/bar/1 -d '{\"id\":1, \"content\":\"foo bar foo2 bar2 foo3 bar3\"}'\ncurl -XPUT http://localhost:9200/foo/bar/2 -d '{\"id\":2, \"content\":\"foo3 bar3 foo4 bar4\"}'\n\n\ncurl -XGET 'http://localhost:9200/foo/bar/1/_mlt?mlt_fields=content&min_term_freq=1&min_doc_freq=1'\ncurl -XGET 'http://localhost:9200/foo/bar/1/_mlt?min_term_freq=1&min_doc_freq=1'\n```\n\nfails(second query) with:\n{\"error\":\"MapperParsingException[failed to parse [id]]; nested: ElasticSearchIllegalStateException[Field should have either a string, numeric or binary value]; \",\"status\":400}\n\nThis is basically because here(for example):\nhttps://github.com/elasticsearch/elasticsearch/blob/master/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java#L356-L360\n\nThe numeric value is not actually used unless the field is stored.\n\nThen here:\nhttps://github.com/elasticsearch/elasticsearch/blob/master/src/main/java/org/elasticsearch/action/mlt/TransportMoreLikeThisAction.java#L293-L303\n\nif you can't read it, it will just thrown an exception.\n", "comments": [ { "body": "any comments on that? I could try going around and store the fields(even though its not the best scenario...), but would be nice also having that without the need to reindex...\n", "created_at": "2013-07-02T08:18:22Z" }, { "body": "@lmenezes @jpountz says that he will have a look at it\n", "created_at": "2013-07-02T09:33:11Z" }, { "body": "@clintongormley cool :)\n", "created_at": "2013-07-02T09:41:36Z" }, { "body": "@lmenezes You are right about why you got this error but unfortunately setting the value of the field instance even when the field is not stored won't work. The reason is that Lucene's MoreLikeThis can only work on top on character token streams and numeric fields are encoded as binary token streams.\n\nThis issue is very similar to #3211, where we decided to ignore numeric fields when performing highlighting in order to match Elasticsearch 0.20 behavior. Maybe we should do the same here? @clintongormley what do you think?\n", "created_at": "2013-07-03T09:57:21Z" }, { "body": "My feeling is that the `more_like_this` functionality is about finding string terms in common, rather than numeric similarity, so I would agree with you on ignoring non-string fields. Numeric similarity implies a different type of comparison, which would be usually be better handled by a specific clause outside the `mlt` query.\n\nIf you want to treat numbers as \"full text\" then you can always use a `multi_field` to index them both as numbers and as strings.\n\nSo ++ for ignoring non-strings, I'd say.\n", "created_at": "2013-07-03T10:02:22Z" }, { "body": "@jpountz @clintongormley I don't really agree, since if the numbers are ids for some kind of relation, they represent similarity as well or even better than matching tokens. But, if it's a lucene limitation, ignoring is definitely better than failing. 
Still, it would be nice to have that working on numeric fields (I guess that affects everything that internally is stored as a number, like ips?).\nBut yeah, ignoring is ok.\n", "created_at": "2013-07-03T10:23:15Z" }, { "body": "@lmenezes This is correct, the limitation is in Lucene and this affects everything which is stored as a number, so byte, short, integer, long, float and double but also ips and dates. There might be options to support numbers in the future but right now I think the best fix to apply is to ignore numeric data from the mlt fields.\n", "created_at": "2013-07-03T13:37:42Z" }, { "body": "@jpountz cool, waiting for the fix then :)\n", "created_at": "2013-07-03T13:46:17Z" }, { "body": "The mlt API uses the mlt query, so I updated the pull request:\n- the mlt API doesn't fail even if one of the fields of the document is numeric,\n- mlt and flt queries fail if any of the fields is numeric,\n- the new fail_on_unsupported_field parameter (defaults to true) allows ignoring numeric fields instead of raising an error when it is set to false.\n", "created_at": "2013-07-15T18:04:39Z" }, { "body": "sounds good :+1: \n", "created_at": "2013-07-16T08:01:14Z" } ], "number": 3252, "title": "Error on MoreLikeThis API with Non Stored Numeric Fields" }
{ "body": "More-like-this and fuzzy-like-this queries expect analyzers which are able to\ngenerate character terms (CharTermAttribute), so unfortunately this doesn't\nwork with analyzers which generate binary-only terms (BinaryTermAttribute,\nthe default CharTermAttribute impl being a special BinaryTermAttribute) such as\nour analyzers for numeric fields (byte, short, integer, long, float, double but\nalso date and ip).\n\nTo work around this issue, this commits adds a fail_on_unsupported_field\nparameter to the more-like-this and fuzzy-like-this parsers. When this parameter\nis false, numeric fields will just be ignored and when it is true, an error will\nbe returned, saying that these queries don't support numeric fields. By default,\nthis setting is true but the mlt API sets it to true in order not to fail on\ndocuments which contain numeric fields.\n\nClose #3252\n", "number": 3291, "review_comments": [], "title": "Add the ability to ignore or fail on numeric fields when executing more-like-this or fuzzy-like-this queries." }
{ "commits": [ { "message": "Add the ability to ignore or fail on numeric fields when executing more-like-this or fuzzy-like-this queries.\n\nMore-like-this and fuzzy-like-this queries expect analyzers which are able to\ngenerate character terms (CharTermAttribute), so unfortunately this doesn't\nwork with analyzers which generate binary-only terms (BinaryTermAttribute,\nthe default CharTermAttribute impl being a special BinaryTermAttribute) such as\nour analyzers for numeric fields (byte, short, integer, long, float, double but\nalso date and ip).\n\nTo work around this issue, this commits adds a fail_on_unsupported_field\nparameter to the more-like-this and fuzzy-like-this parsers. When this parameter\nis false, numeric fields will just be ignored and when it is true, an error will\nbe returned, saying that these queries don't support numeric fields. By default,\nthis setting is true but the mlt API sets it to true in order not to fail on\ndocuments which contain numeric fields.\n\nClose #3252" } ], "files": [ { "diff": "@@ -160,7 +160,7 @@ public void onResponse(GetResponse getResponse) {\n GetField getField = getResponse.getField(field);\n if (getField != null) {\n for (Object value : getField.getValues()) {\n- addMoreLikeThis(request, boolBuilder, getField.getName(), value.toString());\n+ addMoreLikeThis(request, boolBuilder, getField.getName(), value.toString(), true);\n }\n it.remove();\n }\n@@ -282,7 +282,7 @@ public boolean beforeFieldAdded(FieldMapper fieldMapper, Field field, Object par\n }\n \n if (fields.isEmpty() || fields.contains(field.name())) {\n- addMoreLikeThis(request, boolBuilder, fieldMapper, field);\n+ addMoreLikeThis(request, boolBuilder, fieldMapper, field, !fields.isEmpty());\n }\n \n return false;\n@@ -302,11 +302,11 @@ private Object convertField(Field field) {\n }\n }\n \n- private void addMoreLikeThis(MoreLikeThisRequest request, BoolQueryBuilder boolBuilder, FieldMapper fieldMapper, Field field) {\n- addMoreLikeThis(request, boolBuilder, field.name(), fieldMapper.value(convertField(field)).toString());\n+ private void addMoreLikeThis(MoreLikeThisRequest request, BoolQueryBuilder boolBuilder, FieldMapper fieldMapper, Field field, boolean failOnUnsupportedField) {\n+ addMoreLikeThis(request, boolBuilder, field.name(), fieldMapper.value(convertField(field)).toString(), failOnUnsupportedField);\n }\n \n- private void addMoreLikeThis(MoreLikeThisRequest request, BoolQueryBuilder boolBuilder, String fieldName, String likeText) {\n+ private void addMoreLikeThis(MoreLikeThisRequest request, BoolQueryBuilder boolBuilder, String fieldName, String likeText, boolean failOnUnsupportedField) {\n MoreLikeThisFieldQueryBuilder mlt = moreLikeThisFieldQuery(fieldName)\n .likeText(likeText)\n .percentTermsToMatch(request.percentTermsToMatch())\n@@ -317,7 +317,8 @@ private void addMoreLikeThis(MoreLikeThisRequest request, BoolQueryBuilder boolB\n .maxWordLen(request.maxWordLen())\n .minTermFreq(request.minTermFreq())\n .maxQueryTerms(request.maxQueryTerms())\n- .stopWords(request.stopWords());\n+ .stopWords(request.stopWords())\n+ .failOnUnsupportedField(failOnUnsupportedField);\n boolBuilder.should(mlt);\n }\n ", "filename": "src/main/java/org/elasticsearch/action/mlt/TransportMoreLikeThisAction.java", "status": "modified" }, { "diff": "@@ -22,6 +22,9 @@\n import com.google.common.base.Charsets;\n import com.google.common.collect.ImmutableList;\n import com.google.common.collect.ImmutableMap;\n+import org.apache.lucene.analysis.Analyzer;\n+import 
org.apache.lucene.analysis.NumericTokenStream;\n+import org.apache.lucene.analysis.TokenStream;\n import org.apache.lucene.analysis.ar.ArabicAnalyzer;\n import org.apache.lucene.analysis.bg.BulgarianAnalyzer;\n import org.apache.lucene.analysis.br.BrazilianAnalyzer;\n@@ -48,6 +51,7 @@\n import org.apache.lucene.analysis.ro.RomanianAnalyzer;\n import org.apache.lucene.analysis.ru.RussianAnalyzer;\n import org.apache.lucene.analysis.sv.SwedishAnalyzer;\n+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;\n import org.apache.lucene.analysis.tr.TurkishAnalyzer;\n import org.apache.lucene.analysis.util.CharArraySet;\n import org.apache.lucene.util.Version;\n@@ -61,10 +65,7 @@\n import org.elasticsearch.env.Environment;\n import org.elasticsearch.index.settings.IndexSettings;\n \n-import java.io.BufferedReader;\n-import java.io.IOException;\n-import java.io.InputStreamReader;\n-import java.io.Reader;\n+import java.io.*;\n import java.net.URL;\n import java.util.*;\n \n@@ -275,4 +276,32 @@ public static Reader getReaderFromFile(Environment env, Settings settings, Strin\n \n return reader;\n }\n+\n+ /**\n+ * Check whether the provided token stream is able to provide character\n+ * terms.\n+ * <p>Although most analyzers generate character terms (CharTermAttribute),\n+ * some token only contain binary terms (BinaryTermAttribute,\n+ * CharTermAttribute being a special type of BinaryTermAttribute), such as\n+ * {@link NumericTokenStream} and unsuitable for highlighting and\n+ * more-like-this queries which expect character terms.</p>\n+ */\n+ public static boolean isCharacterTokenStream(TokenStream tokenStream) {\n+ try {\n+ tokenStream.addAttribute(CharTermAttribute.class);\n+ return true;\n+ } catch (IllegalArgumentException e) {\n+ return false;\n+ }\n+ }\n+\n+ /**\n+ * Check whether {@link TokenStream}s generated with <code>analyzer</code>\n+ * provide with character terms.\n+ * @see #isCharacterTokenStream(TokenStream)\n+ */\n+ public static boolean generatesCharacterTokenStream(Analyzer analyzer, String fieldName) throws IOException {\n+ return isCharacterTokenStream(analyzer.tokenStream(fieldName, new StringReader(\"\")));\n+ }\n+\n }", "filename": "src/main/java/org/elasticsearch/index/analysis/Analysis.java", "status": "modified" }, { "diff": "@@ -356,7 +356,7 @@ public static class CustomByteNumericField extends CustomNumericField {\n private final NumberFieldMapper mapper;\n \n public CustomByteNumericField(NumberFieldMapper mapper, byte number, FieldType fieldType) {\n- super(mapper, mapper.fieldType.stored() ? number : null, fieldType);\n+ super(mapper, number, fieldType);\n this.mapper = mapper;\n this.number = number;\n }", "filename": "src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java", "status": "modified" }, { "diff": "@@ -354,7 +354,7 @@ public static class CustomDoubleNumericField extends CustomNumericField {\n private final NumberFieldMapper mapper;\n \n public CustomDoubleNumericField(NumberFieldMapper mapper, double number, FieldType fieldType) {\n- super(mapper, mapper.fieldType().stored() ? 
number : null, fieldType);\n+ super(mapper, number, fieldType);\n this.mapper = mapper;\n this.number = number;\n }", "filename": "src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java", "status": "modified" }, { "diff": "@@ -350,7 +350,7 @@ public static class CustomFloatNumericField extends CustomNumericField {\n private final NumberFieldMapper mapper;\n \n public CustomFloatNumericField(NumberFieldMapper mapper, float number, FieldType fieldType) {\n- super(mapper, mapper.fieldType().stored() ? number : null, fieldType);\n+ super(mapper, number, fieldType);\n this.mapper = mapper;\n this.number = number;\n }", "filename": "src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java", "status": "modified" }, { "diff": "@@ -353,7 +353,7 @@ public static class CustomIntegerNumericField extends CustomNumericField {\n private final NumberFieldMapper mapper;\n \n public CustomIntegerNumericField(NumberFieldMapper mapper, int number, FieldType fieldType) {\n- super(mapper, mapper.fieldType().stored() ? number : null, fieldType);\n+ super(mapper, number, fieldType);\n this.mapper = mapper;\n this.number = number;\n }", "filename": "src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java", "status": "modified" }, { "diff": "@@ -353,7 +353,7 @@ public static class CustomLongNumericField extends CustomNumericField {\n private final NumberFieldMapper mapper;\n \n public CustomLongNumericField(NumberFieldMapper mapper, long number, FieldType fieldType) {\n- super(mapper, mapper.fieldType.stored() ? number : null, fieldType);\n+ super(mapper, number, fieldType);\n this.mapper = mapper;\n this.number = number;\n }", "filename": "src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java", "status": "modified" }, { "diff": "@@ -358,7 +358,7 @@ public static class CustomShortNumericField extends CustomNumericField {\n private final NumberFieldMapper mapper;\n \n public CustomShortNumericField(NumberFieldMapper mapper, short number, FieldType fieldType) {\n- super(mapper, mapper.fieldType().stored() ? 
number : null, fieldType);\n+ super(mapper, number, fieldType);\n this.mapper = mapper;\n this.number = number;\n }", "filename": "src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java", "status": "modified" }, { "diff": "@@ -38,6 +38,7 @@ public class FuzzyLikeThisFieldQueryBuilder extends BaseQueryBuilder implements\n private Integer maxQueryTerms;\n private Boolean ignoreTF;\n private String analyzer;\n+ private boolean failOnUnsupportedField;\n \n /**\n * A fuzzy more like this query on the provided field.\n@@ -89,6 +90,14 @@ public FuzzyLikeThisFieldQueryBuilder boost(float boost) {\n return this;\n }\n \n+ /**\n+ * Whether to fail or return no result when this query is run against a field which is not supported such as binary/numeric fields.\n+ */\n+ public FuzzyLikeThisFieldQueryBuilder failOnUnsupportedField(boolean fail) {\n+ failOnUnsupportedField = fail;\n+ return this;\n+ }\n+\n @Override\n protected void doXContent(XContentBuilder builder, Params params) throws IOException {\n builder.startObject(FuzzyLikeThisFieldQueryParser.NAME);\n@@ -115,6 +124,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep\n if (analyzer != null) {\n builder.field(\"analyzer\", analyzer);\n }\n+ if (!failOnUnsupportedField) {\n+ builder.field(\"fail_on_unsupported_field\", failOnUnsupportedField);\n+ }\n builder.endObject();\n builder.endObject();\n }", "filename": "src/main/java/org/elasticsearch/index/query/FuzzyLikeThisFieldQueryBuilder.java", "status": "modified" }, { "diff": "@@ -22,9 +22,11 @@\n import org.apache.lucene.analysis.Analyzer;\n import org.apache.lucene.sandbox.queries.FuzzyLikeThisQuery;\n import org.apache.lucene.search.Query;\n+import org.elasticsearch.ElasticSearchIllegalArgumentException;\n import org.elasticsearch.common.Strings;\n import org.elasticsearch.common.inject.Inject;\n import org.elasticsearch.common.xcontent.XContentParser;\n+import org.elasticsearch.index.analysis.Analysis;\n import org.elasticsearch.index.mapper.MapperService;\n \n import java.io.IOException;\n@@ -67,6 +69,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n int prefixLength = 0;\n boolean ignoreTF = false;\n Analyzer analyzer = null;\n+ boolean failOnUnsupportedField = true;\n \n XContentParser.Token token = parser.nextToken();\n if (token != XContentParser.Token.FIELD_NAME) {\n@@ -100,6 +103,8 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n prefixLength = parser.intValue();\n } else if (\"analyzer\".equals(currentFieldName)) {\n analyzer = parseContext.analysisService().analyzer(parser.text());\n+ } else if (\"fail_on_unsupported_field\".equals(currentFieldName) || \"failOnUnsupportedField\".equals(currentFieldName)) {\n+ failOnUnsupportedField = parser.booleanValue();\n } else {\n throw new QueryParsingException(parseContext.index(), \"[flt_field] query does not support [\" + currentFieldName + \"]\");\n }\n@@ -122,6 +127,13 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n if (analyzer == null) {\n analyzer = parseContext.mapperService().searchAnalyzer();\n }\n+ if (!Analysis.generatesCharacterTokenStream(analyzer, fieldName)) {\n+ if (failOnUnsupportedField) {\n+ throw new ElasticSearchIllegalArgumentException(\"fuzzy_like_this_field doesn't support binary/numeric fields: [\" + fieldName + \"]\");\n+ } else {\n+ return null;\n+ }\n+ }\n \n FuzzyLikeThisQuery query = new FuzzyLikeThisQuery(maxNumTerms, analyzer);\n 
query.addTerms(likeText, fieldName, minSimilarity, prefixLength);", "filename": "src/main/java/org/elasticsearch/index/query/FuzzyLikeThisFieldQueryParser.java", "status": "modified" }, { "diff": "@@ -38,6 +38,7 @@ public class FuzzyLikeThisQueryBuilder extends BaseQueryBuilder implements Boost\n private Integer maxQueryTerms;\n private Boolean ignoreTF;\n private String analyzer;\n+ private boolean failOnUnsupportedField = true;;\n \n /**\n * Constructs a new fuzzy like this query which uses the \"_all\" field.\n@@ -96,6 +97,14 @@ public FuzzyLikeThisQueryBuilder boost(float boost) {\n return this;\n }\n \n+ /**\n+ * Whether to fail or return no result when this query is run against a field which is not supported such as binary/numeric fields.\n+ */\n+ public FuzzyLikeThisQueryBuilder failOnUnsupportedField(boolean fail) {\n+ failOnUnsupportedField = fail;\n+ return this;\n+ }\n+\n @Override\n protected void doXContent(XContentBuilder builder, Params params) throws IOException {\n builder.startObject(FuzzyLikeThisQueryParser.NAME);\n@@ -128,6 +137,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep\n if (analyzer != null) {\n builder.field(\"analyzer\", analyzer);\n }\n+ if (!failOnUnsupportedField) {\n+ builder.field(\"fail_on_unsupported_field\", failOnUnsupportedField);\n+ }\n builder.endObject();\n }\n }\n\\ No newline at end of file", "filename": "src/main/java/org/elasticsearch/index/query/FuzzyLikeThisQueryBuilder.java", "status": "modified" }, { "diff": "@@ -19,14 +19,18 @@\n \n package org.elasticsearch.index.query;\n \n+import com.google.common.collect.ImmutableList;\n import com.google.common.collect.Lists;\n import org.apache.lucene.analysis.Analyzer;\n import org.apache.lucene.sandbox.queries.FuzzyLikeThisQuery;\n import org.apache.lucene.search.Query;\n+import org.elasticsearch.ElasticSearchIllegalArgumentException;\n import org.elasticsearch.common.inject.Inject;\n import org.elasticsearch.common.xcontent.XContentParser;\n+import org.elasticsearch.index.analysis.Analysis;\n \n import java.io.IOException;\n+import java.util.Iterator;\n import java.util.List;\n \n /**\n@@ -66,6 +70,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n int prefixLength = 0;\n boolean ignoreTF = false;\n Analyzer analyzer = null;\n+ boolean failOnUnsupportedField = true;\n \n XContentParser.Token token;\n String currentFieldName = null;\n@@ -87,12 +92,14 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n prefixLength = parser.intValue();\n } else if (\"analyzer\".equals(currentFieldName)) {\n analyzer = parseContext.analysisService().analyzer(parser.text());\n+ } else if (\"fail_on_unsupported_field\".equals(currentFieldName) || \"failOnUnsupportedField\".equals(currentFieldName)) {\n+ failOnUnsupportedField = parser.booleanValue();\n } else {\n throw new QueryParsingException(parseContext.index(), \"[flt] query does not support [\" + currentFieldName + \"]\");\n }\n } else if (token == XContentParser.Token.START_ARRAY) {\n if (\"fields\".equals(currentFieldName)) {\n- fields = Lists.newArrayList();\n+ fields = Lists.newLinkedList();\n while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {\n fields.add(parseContext.indexName(parser.text()));\n }\n@@ -112,13 +119,26 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n \n FuzzyLikeThisQuery query = new FuzzyLikeThisQuery(maxNumTerms, analyzer);\n if (fields == null) {\n- // add the default _all 
field\n- query.addTerms(likeText, parseContext.defaultField(), minSimilarity, prefixLength);\n- } else {\n- for (String field : fields) {\n- query.addTerms(likeText, field, minSimilarity, prefixLength);\n+ fields = Lists.newArrayList(parseContext.defaultField());\n+ } else if (fields.isEmpty()) {\n+ throw new QueryParsingException(parseContext.index(), \"fuzzy_like_this requires 'fields' to be non-empty\");\n+ }\n+ for (Iterator<String> it = fields.iterator(); it.hasNext(); ) {\n+ final String fieldName = it.next();\n+ if (!Analysis.generatesCharacterTokenStream(analyzer, fieldName)) {\n+ if (failOnUnsupportedField) {\n+ throw new ElasticSearchIllegalArgumentException(\"more_like_this doesn't support binary/numeric fields: [\" + fieldName + \"]\");\n+ } else {\n+ it.remove();\n+ }\n }\n }\n+ if (fields.isEmpty()) {\n+ return null;\n+ }\n+ for (String field : fields) {\n+ query.addTerms(likeText, field, minSimilarity, prefixLength);\n+ }\n query.setBoost(boost);\n query.setIgnoreTF(ignoreTF);\n ", "filename": "src/main/java/org/elasticsearch/index/query/FuzzyLikeThisQueryParser.java", "status": "modified" }, { "diff": "@@ -44,6 +44,7 @@ public class MoreLikeThisFieldQueryBuilder extends BaseQueryBuilder implements B\n private float boostTerms = -1;\n private float boost = -1;\n private String analyzer;\n+ private boolean failOnUnsupportedField;\n \n /**\n * A more like this query that runs against a specific field.\n@@ -157,6 +158,14 @@ public MoreLikeThisFieldQueryBuilder boost(float boost) {\n return this;\n }\n \n+ /**\n+ * Whether to fail or return no result when this query is run against a field which is not supported such as binary/numeric fields.\n+ */\n+ public MoreLikeThisFieldQueryBuilder failOnUnsupportedField(boolean fail) {\n+ failOnUnsupportedField = fail;\n+ return this;\n+ }\n+\n @Override\n protected void doXContent(XContentBuilder builder, Params params) throws IOException {\n builder.startObject(MoreLikeThisFieldQueryParser.NAME);\n@@ -202,6 +211,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep\n if (analyzer != null) {\n builder.field(\"analyzer\", analyzer);\n }\n+ if (!failOnUnsupportedField) {\n+ builder.field(\"fail_on_unsupported_field\", failOnUnsupportedField);\n+ }\n builder.endObject();\n builder.endObject();\n }", "filename": "src/main/java/org/elasticsearch/index/query/MoreLikeThisFieldQueryBuilder.java", "status": "modified" }, { "diff": "@@ -22,10 +22,12 @@\n import com.google.common.collect.Sets;\n import org.apache.lucene.analysis.Analyzer;\n import org.apache.lucene.search.Query;\n+import org.elasticsearch.ElasticSearchIllegalArgumentException;\n import org.elasticsearch.common.Strings;\n import org.elasticsearch.common.inject.Inject;\n import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;\n import org.elasticsearch.common.xcontent.XContentParser;\n+import org.elasticsearch.index.analysis.Analysis;\n import org.elasticsearch.index.mapper.MapperService;\n \n import java.io.IOException;\n@@ -65,6 +67,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n MoreLikeThisQuery mltQuery = new MoreLikeThisQuery();\n mltQuery.setSimilarity(parseContext.searchSimilarity());\n Analyzer analyzer = null;\n+ boolean failOnUnsupportedField = true;\n \n String currentFieldName = null;\n while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {\n@@ -94,6 +97,8 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n analyzer = 
parseContext.analysisService().analyzer(parser.text());\n } else if (\"boost\".equals(currentFieldName)) {\n mltQuery.setBoost(parser.floatValue());\n+ } else if (\"fail_on_unsupported_field\".equals(currentFieldName) || \"failOnUnsupportedField\".equals(currentFieldName)) {\n+ failOnUnsupportedField = parser.booleanValue();\n } else {\n throw new QueryParsingException(parseContext.index(), \"[mlt_field] query does not support [\" + currentFieldName + \"]\");\n }\n@@ -130,6 +135,13 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n if (analyzer == null) {\n analyzer = parseContext.mapperService().searchAnalyzer();\n }\n+ if (!Analysis.generatesCharacterTokenStream(analyzer, fieldName)) {\n+ if (failOnUnsupportedField) {\n+ throw new ElasticSearchIllegalArgumentException(\"more_like_this_field doesn't support binary/numeric fields: [\" + fieldName + \"]\");\n+ } else {\n+ return null;\n+ }\n+ }\n mltQuery.setAnalyzer(analyzer);\n mltQuery.setMoreLikeFields(new String[]{fieldName});\n return wrapSmartNameQuery(mltQuery, smartNameFieldMappers, parseContext);", "filename": "src/main/java/org/elasticsearch/index/query/MoreLikeThisFieldQueryParser.java", "status": "modified" }, { "diff": "@@ -45,6 +45,7 @@ public class MoreLikeThisQueryBuilder extends BaseQueryBuilder implements Boosta\n private float boostTerms = -1;\n private float boost = -1;\n private String analyzer;\n+ private boolean failOnUnsupportedField = true;\n \n /**\n * Constructs a new more like this query which uses the \"_all\" field.\n@@ -165,6 +166,14 @@ public MoreLikeThisQueryBuilder boost(float boost) {\n return this;\n }\n \n+ /**\n+ * Whether to fail or return no result when this query is run against a field which is not supported such as binary/numeric fields.\n+ */\n+ public MoreLikeThisQueryBuilder failOnUnsupportedField(boolean fail) {\n+ failOnUnsupportedField = fail;\n+ return this;\n+ }\n+\n @Override\n protected void doXContent(XContentBuilder builder, Params params) throws IOException {\n builder.startObject(MoreLikeThisQueryParser.NAME);\n@@ -216,6 +225,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep\n if (analyzer != null) {\n builder.field(\"analyzer\", analyzer);\n }\n+ if (!failOnUnsupportedField) {\n+ builder.field(\"fail_on_unsupported_field\", failOnUnsupportedField);\n+ }\n builder.endObject();\n }\n }\n\\ No newline at end of file", "filename": "src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java", "status": "modified" }, { "diff": "@@ -23,11 +23,15 @@\n import com.google.common.collect.Sets;\n import org.apache.lucene.analysis.Analyzer;\n import org.apache.lucene.search.Query;\n+import org.elasticsearch.ElasticSearchIllegalArgumentException;\n+import org.elasticsearch.common.Strings;\n import org.elasticsearch.common.inject.Inject;\n import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;\n import org.elasticsearch.common.xcontent.XContentParser;\n+import org.elasticsearch.index.analysis.Analysis;\n \n import java.io.IOException;\n+import java.util.Iterator;\n import java.util.List;\n import java.util.Set;\n \n@@ -52,9 +56,10 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n XContentParser parser = parseContext.parser();\n \n MoreLikeThisQuery mltQuery = new MoreLikeThisQuery();\n- mltQuery.setMoreLikeFields(new String[]{parseContext.defaultField()});\n mltQuery.setSimilarity(parseContext.searchSimilarity());\n Analyzer analyzer = null;\n+ List<String> 
moreLikeFields = null;\n+ boolean failOnUnsupportedField = true;\n \n XContentParser.Token token;\n String currentFieldName = null;\n@@ -85,6 +90,8 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n analyzer = parseContext.analysisService().analyzer(parser.text());\n } else if (\"boost\".equals(currentFieldName)) {\n mltQuery.setBoost(parser.floatValue());\n+ } else if (\"fail_on_unsupported_field\".equals(currentFieldName) || \"failOnUnsupportedField\".equals(currentFieldName)) {\n+ failOnUnsupportedField = parser.booleanValue();\n } else {\n throw new QueryParsingException(parseContext.index(), \"[mlt] query does not support [\" + currentFieldName + \"]\");\n }\n@@ -96,11 +103,10 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n }\n mltQuery.setStopWords(stopWords);\n } else if (\"fields\".equals(currentFieldName)) {\n- List<String> fields = Lists.newArrayList();\n+ moreLikeFields = Lists.newLinkedList();\n while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {\n- fields.add(parseContext.indexName(parser.text()));\n+ moreLikeFields.add(parseContext.indexName(parser.text()));\n }\n- mltQuery.setMoreLikeFields(fields.toArray(new String[fields.size()]));\n } else {\n throw new QueryParsingException(parseContext.index(), \"[mlt] query does not support [\" + currentFieldName + \"]\");\n }\n@@ -110,15 +116,33 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n if (mltQuery.getLikeText() == null) {\n throw new QueryParsingException(parseContext.index(), \"more_like_this requires 'like_text' to be specified\");\n }\n- if (mltQuery.getMoreLikeFields() == null || mltQuery.getMoreLikeFields().length == 0) {\n- throw new QueryParsingException(parseContext.index(), \"more_like_this requires 'fields' to be specified\");\n- }\n \n if (analyzer == null) {\n analyzer = parseContext.mapperService().searchAnalyzer();\n }\n-\n mltQuery.setAnalyzer(analyzer);\n+\n+ if (moreLikeFields == null) {\n+ moreLikeFields = Lists.newArrayList(parseContext.defaultField());\n+ } else if (moreLikeFields.isEmpty()) {\n+ throw new QueryParsingException(parseContext.index(), \"more_like_this requires 'fields' to be non-empty\");\n+ }\n+\n+ for (Iterator<String> it = moreLikeFields.iterator(); it.hasNext(); ) {\n+ final String fieldName = it.next();\n+ if (!Analysis.generatesCharacterTokenStream(analyzer, fieldName)) {\n+ if (failOnUnsupportedField) {\n+ throw new ElasticSearchIllegalArgumentException(\"more_like_this doesn't support binary/numeric fields: [\" + fieldName + \"]\");\n+ } else {\n+ it.remove();\n+ }\n+ }\n+ }\n+ if (moreLikeFields.isEmpty()) {\n+ return null;\n+ }\n+ mltQuery.setMoreLikeFields(moreLikeFields.toArray(Strings.EMPTY_ARRAY));\n+\n return mltQuery;\n }\n }\n\\ No newline at end of file", "filename": "src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java", "status": "modified" }, { "diff": "@@ -0,0 +1,89 @@\n+/*\n+ * Licensed to ElasticSearch and Shay Banon under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. 
You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+\n+package org.elasticsearch.test.integration.flt;\n+\n+import org.elasticsearch.action.search.SearchPhaseExecutionException;\n+import org.elasticsearch.action.search.SearchResponse;\n+import org.elasticsearch.test.integration.AbstractSharedClusterTest;\n+import org.testng.annotations.Test;\n+\n+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n+import static org.elasticsearch.index.query.QueryBuilders.fuzzyLikeThisFieldQuery;\n+import static org.elasticsearch.index.query.QueryBuilders.fuzzyLikeThisQuery;\n+import static org.hamcrest.MatcherAssert.assertThat;\n+import static org.hamcrest.Matchers.equalTo;\n+import static org.testng.Assert.fail;\n+\n+/**\n+ *\n+ */\n+public class FuzzyLikeThisActionTests extends AbstractSharedClusterTest {\n+\n+ @Test\n+ // See issue https://github.com/elasticsearch/elasticsearch/issues/3252\n+ public void testNumericField() throws Exception {\n+ prepareCreate(\"test\").execute().actionGet();\n+ ensureGreen();\n+ client().prepareIndex(\"test\", \"type\", \"1\")\n+ .setSource(jsonBuilder().startObject().field(\"string_value\", \"lucene index\").field(\"int_value\", 1).endObject())\n+ .execute().actionGet();\n+ client().prepareIndex(\"test\", \"type\", \"2\")\n+ .setSource(jsonBuilder().startObject().field(\"string_value\", \"elasticsearch index\").field(\"int_value\", 42).endObject())\n+ .execute().actionGet();\n+\n+ refresh();\n+\n+ // flt query with no field -> OK\n+ SearchResponse searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisQuery().likeText(\"index\")).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));\n+\n+ // flt query with string fields\n+ searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisQuery(\"string_value\").likeText(\"index\")).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));\n+\n+ // flt query with at least a numeric field -> fail\n+ try {\n+ searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisQuery(\"string_value\", \"int_value\").likeText(\"index\")).execute().actionGet();\n+ fail();\n+ } catch (SearchPhaseExecutionException e) {\n+ // OK\n+ }\n+\n+ // flt query with at least a numeric field but fail_on_unsupported_field set to false\n+ searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisQuery(\"string_value\", \"int_value\").likeText(\"index\").failOnUnsupportedField(false)).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));\n+\n+ // flt field query on a numeric field -> failure\n+ try {\n+ searchResponse = client().prepareSearch().setQuery(fuzzyLikeThisFieldQuery(\"int_value\").likeText(\"42\")).execute().actionGet();\n+ } catch (SearchPhaseExecutionException e) {\n+ // OK\n+ }\n+\n+ // flt field query on a numeric field but fail_on_unsupported_field set to false\n+ searchResponse = 
client().prepareSearch().setQuery(fuzzyLikeThisFieldQuery(\"int_value\").likeText(\"42\").failOnUnsupportedField(false)).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));\n+ }\n+\n+}", "filename": "src/test/java/org/elasticsearch/test/integration/flt/FuzzyLikeThisActionTests.java", "status": "added" }, { "diff": "@@ -19,24 +19,25 @@\n \n package org.elasticsearch.test.integration.mlt;\n \n-import static org.elasticsearch.client.Requests.indexAliasesRequest;\n-import static org.elasticsearch.client.Requests.indexRequest;\n-import static org.elasticsearch.client.Requests.moreLikeThisRequest;\n-import static org.elasticsearch.client.Requests.refreshRequest;\n-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n-import static org.elasticsearch.index.query.FilterBuilders.termFilter;\n-import static org.hamcrest.MatcherAssert.assertThat;\n-import static org.hamcrest.Matchers.equalTo;\n-import static org.hamcrest.Matchers.notNullValue;\n-\n import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;\n+import org.elasticsearch.action.search.SearchPhaseExecutionException;\n import org.elasticsearch.action.search.SearchResponse;\n import org.elasticsearch.client.Client;\n import org.elasticsearch.common.settings.ImmutableSettings;\n import org.elasticsearch.common.xcontent.XContentFactory;\n import org.elasticsearch.test.integration.AbstractSharedClusterTest;\n import org.testng.annotations.Test;\n \n+import static org.elasticsearch.client.Requests.*;\n+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n+import static org.elasticsearch.index.query.FilterBuilders.termFilter;\n+import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisFieldQuery;\n+import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery;\n+import static org.hamcrest.MatcherAssert.assertThat;\n+import static org.hamcrest.Matchers.equalTo;\n+import static org.hamcrest.Matchers.notNullValue;\n+import static org.testng.Assert.fail;\n+\n /**\n *\n */\n@@ -151,4 +152,67 @@ public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception {\n assertThat(searchResponse, notNullValue());\n }\n \n+ @Test\n+ // See issue https://github.com/elasticsearch/elasticsearch/issues/3252\n+ public void testNumericField() throws Exception {\n+ prepareCreate(\"test\").execute().actionGet();\n+ ensureGreen();\n+ client().prepareIndex(\"test\", \"type\", \"1\")\n+ .setSource(jsonBuilder().startObject().field(\"string_value\", \"lucene index\").field(\"int_value\", 1).endObject())\n+ .execute().actionGet();\n+ client().prepareIndex(\"test\", \"type\", \"2\")\n+ .setSource(jsonBuilder().startObject().field(\"string_value\", \"elasticsearch index\").field(\"int_value\", 42).endObject())\n+ .execute().actionGet();\n+\n+ refresh();\n+\n+ // Implicit list of fields -> ignore numeric fields\n+ SearchResponse searchResponse = client().prepareMoreLikeThis(\"test\", \"type\", \"1\").setMinDocFreq(1).setMinTermFreq(1).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));\n+\n+ // Explicit list of fields including numeric fields -> fail\n+ try {\n+ searchResponse = client().prepareMoreLikeThis(\"test\", \"type\", \"1\").setField(\"string_value\", \"int_value\").execute().actionGet();\n+ fail();\n+ } catch (SearchPhaseExecutionException e) {\n+ // OK\n+ }\n+\n+ // mlt 
query with no field -> OK\n+ searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery().likeText(\"index\").minTermFreq(1).minDocFreq(1)).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));\n+\n+ // mlt query with string fields\n+ searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(\"string_value\").likeText(\"index\").minTermFreq(1).minDocFreq(1)).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));\n+\n+ // mlt query with at least a numeric field -> fail\n+ try {\n+ searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(\"string_value\", \"int_value\").likeText(\"index\")).execute().actionGet();\n+ fail();\n+ } catch (SearchPhaseExecutionException e) {\n+ // OK\n+ }\n+\n+ // mlt query with at least a numeric field but fail_on_unsupported_field set to false\n+ searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(\"string_value\", \"int_value\").likeText(\"index\").minTermFreq(1).minDocFreq(1).failOnUnsupportedField(false)).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));\n+\n+ // mlt field query on a numeric field -> failure\n+ try {\n+ searchResponse = client().prepareSearch().setQuery(moreLikeThisFieldQuery(\"int_value\").likeText(\"42\").minTermFreq(1).minDocFreq(1)).execute().actionGet();\n+ } catch (SearchPhaseExecutionException e) {\n+ // OK\n+ }\n+\n+ // mlt field query on a numeric field but fail_on_unsupported_field set to false\n+ searchResponse = client().prepareSearch().setQuery(moreLikeThisFieldQuery(\"int_value\").likeText(\"42\").minTermFreq(1).minDocFreq(1).failOnUnsupportedField(false)).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));\n+ }\n+\n }", "filename": "src/test/java/org/elasticsearch/test/integration/mlt/MoreLikeThisActionTests.java", "status": "modified" }, { "diff": "@@ -185,9 +185,9 @@ public void testLatLonValues() throws Exception {\n .bytes());\n \n assertThat(doc.rootDoc().getField(\"point.lat\"), notNullValue());\n- assertThat(doc.rootDoc().getField(\"point.lat\").numericValue(), nullValue());\n+ assertThat(doc.rootDoc().getField(\"point.lat\").fieldType().stored(), is(false));\n assertThat(doc.rootDoc().getField(\"point.lon\"), notNullValue());\n- assertThat(doc.rootDoc().getField(\"point.lon\").numericValue(), nullValue());\n+ assertThat(doc.rootDoc().getField(\"point.lon\").fieldType().stored(), is(false));\n assertThat(doc.rootDoc().getField(\"point.geohash\"), nullValue());\n assertThat(doc.rootDoc().get(\"point\"), equalTo(\"1.2,1.3\"));\n }", "filename": "src/test/java/org/elasticsearch/test/unit/index/mapper/geo/LatLonMappingGeoPointTests.java", "status": "modified" } ] }
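For reference, the per-field guard that both parsers now apply can be summarized as below. This is a simplified sketch rather than the actual parser code; it only assumes the `Analysis.generatesCharacterTokenStream` helper introduced in the diff above, and it uses a plain `IllegalArgumentException` where the real parsers throw `ElasticSearchIllegalArgumentException`:

```java
import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.analysis.Analysis;

public class CharacterTokenStreamGuard {

    // Returns true if the field may take part in a mlt/flt query; otherwise it
    // either throws (failOnUnsupportedField = true) or reports that the field
    // should be dropped from the query.
    static boolean checkField(Analyzer analyzer, String fieldName,
                              boolean failOnUnsupportedField) throws IOException {
        if (Analysis.generatesCharacterTokenStream(analyzer, fieldName)) {
            return true;
        }
        if (failOnUnsupportedField) {
            throw new IllegalArgumentException(
                    "more_like_this doesn't support binary/numeric fields: [" + fieldName + "]");
        }
        return false; // silently drop the field from the query
    }
}
```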
{ "body": "The `parent` param is not supported by `mget` either at the top-level or at the per-document level (while `routing` is):\n\n```\ncurl -XPUT 'localhost:9200/test_1?pretty=1' -d '\n{\n \"mappings\" : {\n \"test\" : {\n \"_parent\" : {\n \"type\" : \"foo\"\n }\n }\n }\n}\n'\n\ncurl -XGET 'localhost:9200/_cluster/health?wait_for_status=yellow&pretty=1'\n\ncurl -XPUT 'localhost:9200/test_1/test/1?parent=4&pretty=1' -d '\n{\n \"foo\" : \"bar\"\n}\n'\n\ncurl -XGET 'localhost:9200/test_1/test/_mget?pretty=1' -d '\n{\n \"docs\" : [\n {\n \"parent\" : \"4\",\n \"_id\" : \"1\"\n }\n ]\n}\n'\n\n# {\n# \"docs\" : [\n# {\n# \"_index\" : \"test_1\",\n# \"_id\" : \"1\",\n# \"_type\" : \"test\",\n# \"exists\" : false\n# }\n# ]\n# }\n\n\ncurl -XGET 'localhost:9200/test_1/test/_mget?parent=4&pretty=1' -d '\n{\n \"docs\" : [\n {\n \"_id\" : \"1\"\n }\n ]\n}\n'\n\n# {\n# \"docs\" : [\n# {\n# \"_index\" : \"test_1\",\n# \"_id\" : \"1\",\n# \"_type\" : \"test\",\n# \"exists\" : false\n# }\n# ]\n# }\n```\n", "comments": [ { "body": "Should probably just be supported at the `docs` level, not the top level\n", "created_at": "2013-07-01T11:10:41Z" } ], "number": 3274, "title": "Mget: no support for \"parent\"" }
{ "body": "When specifying the docs to be returned in a multi get request, a parent\nfield could not be specified, so that some docs seemingly did not exist,\neven though they did.\n\nThis fix behaves like the normal GetRequest and simply overwrites the\nrouting value if it has not yet been set.\n\nAlso a test for routing with mget has been added.\n\nCloses #3274\n", "number": 3277, "review_comments": [], "title": "Support for parent in multi get request" }
{ "commits": [ { "message": "Support for parent in multi get request\n\nWhen specifying the docs to be returned in a multi get request, a parent\nfield could not be specified, so that some docs seemingly did not exist,\neven though they did.\n\nThis fix behaves like the normal GetRequest and simply overwrites the\nrouting value if it has not yet been set.\n\nAlso a test for routing with mget has been added.\n\nCloses #3274" } ], "files": [ { "diff": "@@ -94,6 +94,13 @@ public String routing() {\n return this.routing;\n }\n \n+ public Item parent(String parent) {\n+ if (routing == null) {\n+ this.routing = parent;\n+ }\n+ return this;\n+ }\n+\n public Item fields(String... fields) {\n this.fields = fields;\n return this;\n@@ -246,6 +253,7 @@ public void add(@Nullable String defaultIndex, @Nullable String defaultType, @Nu\n String type = defaultType;\n String id = null;\n String routing = null;\n+ String parent = null;\n List<String> fields = null;\n while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {\n if (token == XContentParser.Token.FIELD_NAME) {\n@@ -259,6 +267,8 @@ public void add(@Nullable String defaultIndex, @Nullable String defaultType, @Nu\n id = parser.text();\n } else if (\"_routing\".equals(currentFieldName) || \"routing\".equals(currentFieldName)) {\n routing = parser.text();\n+ } else if (\"_parent\".equals(currentFieldName) || \"parent\".equals(currentFieldName)) {\n+ parent = parser.text();\n } else if (\"fields\".equals(currentFieldName)) {\n fields = new ArrayList<String>();\n fields.add(parser.text());\n@@ -278,7 +288,7 @@ public void add(@Nullable String defaultIndex, @Nullable String defaultType, @Nu\n } else {\n aFields = defaultFields;\n }\n- add(new Item(index, type, id).routing(routing).fields(aFields));\n+ add(new Item(index, type, id).routing(routing).fields(aFields).parent(parent));\n }\n } else if (\"ids\".equals(currentFieldName)) {\n while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {", "filename": "src/main/java/org/elasticsearch/action/get/MultiGetRequest.java", "status": "modified" }, { "diff": "@@ -51,4 +51,57 @@ public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException {\n assertThat(mgetResponse.getResponses()[1].isFailed(), is(true));\n assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), is(\"[nonExistingIndex] missing\"));\n }\n+\n+ @Test\n+ public void testThatParentPerDocumentIsSupported() throws Exception {\n+ createIndex(\"test\");\n+ ensureYellow();\n+ client().admin().indices().preparePutMapping(\"test\").setType(\"test\").setSource(jsonBuilder()\n+ .startObject()\n+ .startObject(\"test\")\n+ .startObject(\"_parent\")\n+ .field(\"type\", \"foo\")\n+ .endObject()\n+ .endObject().\n+ endObject()\n+ ).execute().actionGet();\n+\n+ client().prepareIndex(\"test\", \"test\", \"1\").setParent(\"4\").setRefresh(true)\n+ .setSource(jsonBuilder().startObject().field(\"foo\", \"bar\").endObject())\n+ .execute().actionGet();\n+\n+ MultiGetResponse mgetResponse = client().prepareMultiGet()\n+ .add(new MultiGetRequest.Item(\"test\", \"test\", \"1\").parent(\"4\"))\n+ .add(new MultiGetRequest.Item(\"test\", \"test\", \"1\"))\n+ .execute().actionGet();\n+\n+ assertThat(mgetResponse.getResponses().length, is(2));\n+ assertThat(mgetResponse.getResponses()[0].isFailed(), is(false));\n+ assertThat(mgetResponse.getResponses()[0].getResponse().isExists(), is(true));\n+\n+ assertThat(mgetResponse.getResponses()[1].isFailed(), is(false));\n+ 
assertThat(mgetResponse.getResponses()[1].getResponse().isExists(), is(false));\n+ }\n+\n+ @Test\n+ public void testThatRoutingPerDocumentIsSupported() throws Exception {\n+ createIndex(\"test\");\n+ ensureYellow();\n+\n+ client().prepareIndex(\"test\", \"test\", \"1\").setRefresh(true).setRouting(\"bar\")\n+ .setSource(jsonBuilder().startObject().field(\"foo\", \"bar\").endObject())\n+ .execute().actionGet();\n+\n+ MultiGetResponse mgetResponse = client().prepareMultiGet()\n+ .add(new MultiGetRequest.Item(\"test\", \"test\", \"1\").routing(\"bar\"))\n+ .add(new MultiGetRequest.Item(\"test\", \"test\", \"1\"))\n+ .execute().actionGet();\n+\n+ assertThat(mgetResponse.getResponses().length, is(2));\n+ assertThat(mgetResponse.getResponses()[0].isFailed(), is(false));\n+ assertThat(mgetResponse.getResponses()[0].getResponse().isExists(), is(true));\n+\n+ assertThat(mgetResponse.getResponses()[1].isFailed(), is(false));\n+ assertThat(mgetResponse.getResponses()[1].getResponse().isExists(), is(false));\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/integration/mget/SimpleMgetTests.java", "status": "modified" } ] }
{ "body": "`mget` returns an error for each doc if the type or id is not found, but throws a top-level error if the index is not found. This seems inconsistent:\n\n```\ncurl -XPUT 'localhost:9200/test_1/test/1?pretty=1' -d '\n{\n \"foo\" : \"bar\"\n}\n'\n\ncurl -XGET 'localhost:9200/_mget?pretty=1' -d '\n{\n \"docs\" : [\n {\n \"_index\" : \"test_1\",\n \"_id\" : \"2\",\n \"_type\" : \"test\"\n },\n {\n \"_index\" : \"test_1\",\n \"_id\" : \"1\",\n \"_type\" : \"none\"\n },\n {\n \"_index\" : \"test_1\",\n \"_id\" : \"1\",\n \"_type\" : \"test\"\n }\n ]\n}\n'\n\n# {\n# \"docs\" : [\n# {\n# \"_index\" : \"test_1\",\n# \"_id\" : \"2\",\n# \"_type\" : \"test\",\n# \"exists\" : false\n# },\n# {\n# \"_index\" : \"test_1\",\n# \"_id\" : \"1\",\n# \"_type\" : \"none\",\n# \"exists\" : false\n# },\n# {\n# \"_source\" : {\n# \"foo\" : \"bar\"\n# },\n# \"_index\" : \"test_1\",\n# \"_id\" : \"1\",\n# \"_type\" : \"test\",\n# \"exists\" : true,\n# \"_version\" : 1\n# }\n# ]\n# }\n\ncurl -XGET 'localhost:9200/_mget?pretty=1' -d '\n{\n \"docs\" : [\n {\n \"_index\" : \"test_1\",\n \"_id\" : \"2\",\n \"_type\" : \"test\"\n },\n {\n \"_index\" : \"test_2\",\n \"_id\" : \"1\",\n \"_type\" : \"test\"\n },\n {\n \"_index\" : \"test_1\",\n \"_id\" : \"1\",\n \"_type\" : \"none\"\n },\n {\n \"_index\" : \"test_1\",\n \"_id\" : \"1\",\n \"_type\" : \"test\"\n }\n ]\n}\n'\n\n# {\n# \"status\" : 404,\n# \"error\" : \"IndexMissingException[[test_2] missing]\"\n# }\n```\n", "comments": [], "number": 3267, "title": "Mget aborting request if index missing" }
{ "body": "The MultiGet API stops with a IndexMissingException, if only one of all\nrequests tries to access a non existing index. This patch creates a\nfailure for this item without failing the whole request.\n\nCloses #3267\n", "number": 3272, "review_comments": [], "title": "Stop aborting of multiget requests in case of missing index" }
{ "commits": [ { "message": "Stop aborting of multiget requests in case of missing index\n\nThe MultiGet API stops with a IndexMissingException, if only one of all\nrequests tries to access a non existing index. This patch creates a\nfailure for this item without failing the whole request.\n\nCloses #3267" } ], "files": [ { "diff": "@@ -58,9 +58,16 @@ protected void doExecute(final MultiGetRequest request, final ActionListener<Mul\n \n clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ);\n \n+ final MultiGetItemResponse[] responses = new MultiGetItemResponse[request.items.size()];\n+\n Map<ShardId, MultiGetShardRequest> shardRequests = new HashMap<ShardId, MultiGetShardRequest>();\n for (int i = 0; i < request.items.size(); i++) {\n MultiGetRequest.Item item = request.items.get(i);\n+ if (!clusterState.metaData().hasConcreteIndex(item.index())) {\n+ responses[i] = new MultiGetItemResponse(null, new MultiGetResponse.Failure(item.index(), item.type(), item.id(), \"[\" + item.index() + \"] missing\"));\n+ continue;\n+ }\n+\n item.routing(clusterState.metaData().resolveIndexRouting(item.routing(), item.index()));\n item.index(clusterState.metaData().concreteIndex(item.index()));\n ShardId shardId = clusterService.operationRouting()\n@@ -77,7 +84,6 @@ protected void doExecute(final MultiGetRequest request, final ActionListener<Mul\n shardRequest.add(i, item.type(), item.id(), item.fields());\n }\n \n- final MultiGetItemResponse[] responses = new MultiGetItemResponse[request.items.size()];\n final AtomicInteger counter = new AtomicInteger(shardRequests.size());\n \n for (final MultiGetShardRequest shardRequest : shardRequests.values()) {", "filename": "src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java", "status": "modified" }, { "diff": "@@ -0,0 +1,54 @@\n+/*\n+ * Licensed to ElasticSearch and Shay Banon under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. 
See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+package org.elasticsearch.test.integration.mget;\n+\n+import org.elasticsearch.action.get.MultiGetRequest;\n+import org.elasticsearch.action.get.MultiGetResponse;\n+import org.elasticsearch.test.integration.AbstractSharedClusterTest;\n+import org.testng.annotations.Test;\n+\n+import java.io.IOException;\n+\n+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n+import static org.hamcrest.MatcherAssert.assertThat;\n+import static org.hamcrest.Matchers.is;\n+\n+public class SimpleMgetTests extends AbstractSharedClusterTest {\n+\n+ @Test\n+ public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException {\n+ createIndex(\"test\");\n+ ensureYellow();\n+\n+ client().prepareIndex(\"test\", \"test\", \"1\").setSource(jsonBuilder().startObject().field(\"foo\", \"bar\").endObject()).setRefresh(true).execute().actionGet();\n+\n+ MultiGetResponse mgetResponse = client().prepareMultiGet()\n+ .add(new MultiGetRequest.Item(\"test\", \"test\", \"1\"))\n+ .add(new MultiGetRequest.Item(\"nonExistingIndex\", \"test\", \"1\"))\n+ .execute().actionGet();\n+ assertThat(mgetResponse.getResponses().length, is(2));\n+\n+ assertThat(mgetResponse.getResponses()[0].getIndex(), is(\"test\"));\n+ assertThat(mgetResponse.getResponses()[0].isFailed(), is(false));\n+\n+ assertThat(mgetResponse.getResponses()[1].getIndex(), is(\"nonExistingIndex\"));\n+ assertThat(mgetResponse.getResponses()[1].isFailed(), is(true));\n+ assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), is(\"[nonExistingIndex] missing\"));\n+ }\n+}", "filename": "src/test/java/org/elasticsearch/test/integration/mget/SimpleMgetTests.java", "status": "added" } ] }
{ "body": "According to the documentation; Index warmup can be disabled by setting index.warmer.enabled to false. It is supported as a realtime setting using update settings API.\n\nWhen I try to set it back to true;\n\n```\n{\"index.warmer.enabled\":\"true\"}\n```\n\nI get to following exception;\n\n```\n{\"error\":\"RemoteTransportException[[xxx][inet[/xx.x.xx.xx:9300]][indices/settings/update]]; nested: ElasticSearchIllegalArgumentException[Can't update non dynamic settings[[index.warmer.enabled]] for open indices[[public_20120701]]]; \",\"status\":400}\n```\n", "comments": [], "number": 3246, "title": "Index Warmer Setting is not dynamic anymore on 0.90.1" }
{ "body": "Even though proposed in the documentation, the realtime enabling/disabling of\nindex warmers was not supported. This commit adds support for\nindex.warmer.enabled as a dynamic setting.\n\nCloses #3246\n", "number": 3251, "review_comments": [], "title": "Make index.warmer.enabled setting dynamic" }
{ "commits": [ { "message": "Make index.warmer.enabled setting dynamic\n\nEven though proposed in the documentation, the realtime enabling/disabling of\nindex warmers was not supported. This commit adds support for\nindex.warmer.enabled as a dynamic setting.\n\nCloses #3246" } ], "files": [ { "diff": "@@ -39,6 +39,7 @@\n import org.elasticsearch.index.translog.TranslogService;\n import org.elasticsearch.index.translog.fs.FsTranslog;\n import org.elasticsearch.indices.ttl.IndicesTTLService;\n+import org.elasticsearch.indices.warmer.InternalIndicesWarmer;\n \n /**\n */\n@@ -113,6 +114,7 @@ public IndexDynamicSettingsModule() {\n indexDynamicSettings.addDynamicSetting(TranslogService.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, Validator.BYTES_SIZE);\n indexDynamicSettings.addDynamicSetting(TranslogService.INDEX_TRANSLOG_FLUSH_THRESHOLD_PERIOD, Validator.TIME);\n indexDynamicSettings.addDynamicSetting(TranslogService.INDEX_TRANSLOG_DISABLE_FLUSH);\n+ indexDynamicSettings.addDynamicSetting(InternalIndicesWarmer.INDEX_WARMER_ENABLED);\n }\n \n public void addDynamicSettings(String... settings) {", "filename": "src/main/java/org/elasticsearch/index/settings/IndexDynamicSettingsModule.java", "status": "modified" }, { "diff": "@@ -38,6 +38,8 @@\n */\n public class InternalIndicesWarmer extends AbstractComponent implements IndicesWarmer {\n \n+ public static final String INDEX_WARMER_ENABLED = \"index.warmer.enabled\";\n+\n private final ThreadPool threadPool;\n \n private final ClusterService clusterService;\n@@ -69,7 +71,7 @@ public void warm(final WarmerContext context) {\n if (indexMetaData == null) {\n return;\n }\n- if (!indexMetaData.settings().getAsBoolean(\"index.warmer.enabled\", settings.getAsBoolean(\"index.warmer.enabled\", true))) {\n+ if (!indexMetaData.settings().getAsBoolean(INDEX_WARMER_ENABLED, settings.getAsBoolean(INDEX_WARMER_ENABLED, true))) {\n return;\n }\n IndexService indexService = indicesService.indexService(context.shardId().index().name());", "filename": "src/main/java/org/elasticsearch/indices/warmer/InternalIndicesWarmer.java", "status": "modified" }, { "diff": "@@ -19,6 +19,7 @@\n \n package org.elasticsearch.test.integration.indices.wamer;\n \n+import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;\n import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse;\n import org.elasticsearch.cluster.ClusterState;\n import org.elasticsearch.common.Priority;\n@@ -32,6 +33,8 @@\n \n import static org.hamcrest.MatcherAssert.assertThat;\n import static org.hamcrest.Matchers.equalTo;\n+import static org.hamcrest.Matchers.greaterThan;\n+import static org.hamcrest.Matchers.is;\n \n /**\n */\n@@ -175,4 +178,33 @@ public void deleteNonExistentIndexWarmerTest() {\n }\n }\n \n+ @Test // issue 3246\n+ public void ensureThatIndexWarmersCanBeChangedOnRuntime() throws Exception {\n+ client().admin().indices().prepareDelete().execute().actionGet();\n+ client().admin().indices().prepareCreate(\"test\")\n+ .setSettings(ImmutableSettings.settingsBuilder().put(\"index.number_of_shards\", 1))\n+ .execute().actionGet();\n+\n+ client().admin().cluster().prepareHealth(\"test\").setWaitForGreenStatus().execute().actionGet();\n+\n+ client().admin().indices().preparePutWarmer(\"custom_warmer\")\n+ .setSearchRequest(client().prepareSearch(\"test\").setTypes(\"test\").setQuery(QueryBuilders.matchAllQuery()))\n+ .execute().actionGet();\n+\n+ client().prepareIndex(\"test\", \"test\", \"1\").setSource(\"{ \\\"foo\\\" : 
\\\"bar\\\"}\").setRefresh(true).execute().actionGet();\n+\n+ client().admin().indices().prepareUpdateSettings(\"test\").setSettings(\"{ \\\"index.warmer.enabled\\\": false}\").execute().actionGet();\n+\n+ long warmerRunsAfterDisabling = getWarmerRuns();\n+ assertThat(warmerRunsAfterDisabling, is(greaterThan(1L)));\n+\n+ client().prepareIndex(\"test\", \"test\", \"2\").setSource(\"{ \\\"foo2\\\" : \\\"bar2\\\"}\").setRefresh(true).execute().actionGet();\n+\n+ assertThat(warmerRunsAfterDisabling, is(getWarmerRuns()));\n+ }\n+\n+ private long getWarmerRuns() {\n+ IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(\"test\").clear().setWarmer(true).execute().actionGet();\n+ return indicesStatsResponse.getIndex(\"test\").getPrimaries().warmer.total();\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/integration/indices/wamer/SimpleIndicesWarmerTests.java", "status": "modified" } ] }
{ "body": "If I precisely follow the docs & send the following, I get a successful request (200).\n\n```\ncurl -XPOST 'localhost:9200/_search' -d '{\n \"query\": {\n \"query_string\": {\n \"query\": \"hello\"\n }\n },\n \"suggest\": {\n \"suggest\": {\n \"text\": \"*:*\",\n \"term\": {\n \"field\": \"_all\"\n }\n }\n }\n}'\n```\n\nHowever, if I just change the ordering of the `text` & `term` keys, I get the \"Required text option is missing error\" (& a resulting HTTP 500), even though it is present in the JSON. AFAICT, the ordering within the JSON object shouldn't matter.\n\n```\ncurl -XPOST 'localhost:9200/_search' -d '{\n \"query\": {\n \"query_string\": {\n \"query\": \"hello\"\n }\n },\n \"suggest\": {\n \"suggest\": {\n \"term\": {\n \"field\": \"_all\"\n },\n \"text\": \"*:*\"\n }\n }\n}'\n```\n", "comments": [], "number": 3247, "title": "Invalid \"ElasticSearchIllegalArgumentException[The required text option is missing]\" Error" }
{ "body": "The current implementation of parsing suggestions executed inside of the\nthe pull parser - which resulted in being reliable of the order of the\nelements in the request. This fix changes the behaviour to parse the\nrelevant parts of the request first and then execute all the suggestions\nafterwards, so we can be sure that every information has been extracted\nfrom the request before execution.\n\nI did not create a test, as I believe this can only be tested against the REST interface (where we do not have any tests at the moment)\n\nCloses #3247\n", "number": 3250, "review_comments": [], "title": "Dont execute suggest before parsing the full request" }
{ "commits": [ { "message": "Dont execute suggest before parsing the full request\n\nThe current implementation of parsing suggestions executed inside of the\nthe pull parser - which resulted in being reliable of the order of the\nelements in the request. This fix changes the behaviour to parse the\nrelevant parts of the request first and then execute all the suggestions\nafterwards, so we can be sure that every information has been extracted\nfrom the request before execution.\n\nCloses #3247" } ], "files": [ { "diff": "@@ -18,10 +18,6 @@\n */\n package org.elasticsearch.search.suggest;\n \n-import org.elasticsearch.search.SearchShardTarget;\n-\n-import java.io.IOException;\n-\n import org.apache.lucene.util.BytesRef;\n import org.elasticsearch.ElasticSearchIllegalArgumentException;\n import org.elasticsearch.common.inject.Inject;\n@@ -31,6 +27,11 @@\n import org.elasticsearch.search.internal.SearchContext;\n import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;\n \n+import java.io.IOException;\n+import java.util.Map;\n+\n+import static com.google.common.collect.Maps.newHashMap;\n+\n /**\n *\n */\n@@ -52,6 +53,8 @@ public SuggestionSearchContext parseInternal(XContentParser parser, MapperServic\n SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();\n BytesRef globalText = null;\n String fieldName = null;\n+ Map<String, SuggestionContext> suggestionContexts = newHashMap();\n+\n XContentParser.Token token;\n while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {\n if (token == XContentParser.Token.FIELD_NAME) {\n@@ -65,6 +68,7 @@ public SuggestionSearchContext parseInternal(XContentParser parser, MapperServic\n } else if (token == XContentParser.Token.START_OBJECT) {\n String suggestionName = fieldName;\n BytesRef suggestText = null;\n+ SuggestionContext suggestionContext = null;\n \n while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {\n if (token == XContentParser.Token.FIELD_NAME) {\n@@ -83,19 +87,27 @@ public SuggestionSearchContext parseInternal(XContentParser parser, MapperServic\n throw new ElasticSearchIllegalArgumentException(\"Suggester[\" + fieldName + \"] not supported\");\n }\n final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser();\n- SuggestionContext suggestion = contextParser.parse(parser, mapperService);\n- suggestion.setText(suggestText);\n- suggestion.setShard(shardId);\n- suggestion.setIndex(index);\n- SuggestUtils.verifySuggestion(mapperService, globalText, suggestion);\n- suggestionSearchContext.addSuggestion(suggestionName, suggestion);\n+ suggestionContext = contextParser.parse(parser, mapperService);\n }\n }\n+ if (suggestionContext != null) {\n+ suggestionContext.setText(suggestText);\n+ suggestionContexts.put(suggestionName, suggestionContext);\n+ }\n+\n }\n }\n- return suggestionSearchContext;\n- }\n \n+ for (Map.Entry<String, SuggestionContext> entry : suggestionContexts.entrySet()) {\n+ String suggestionName = entry.getKey();\n+ SuggestionContext suggestionContext = entry.getValue();\n \n- \n+ suggestionContext.setShard(shardId);\n+ suggestionContext.setIndex(index);\n+ SuggestUtils.verifySuggestion(mapperService, globalText, suggestionContext);\n+ suggestionSearchContext.addSuggestion(suggestionName, suggestionContext);\n+ }\n+\n+ return suggestionSearchContext;\n+ }\n }", "filename": "src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java", "status": "modified" } ] }
{ "body": "The `geo_shape` filter seems to be unable to handle multiple `geo_shape` fields in a single document if this document is used as indexed filter.\n\nAssume a mapping with multiple `geo_shape` fields:\n\n```\n{\n \"type1\" : {\n \"properties\" : {\n \"location1\" : {\n \"type\" : \"geo_shape\"\n },\n \"location2\" : {\n \"type\" : \"geo_shape\"\n }\n }\n }\n}\n```\n\nand a document\n\n```\n{\n \"location1\" : {\n \"type\":\"polygon\",\n \"coordinates\":[[[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]]]\n },\n \"location2\" : {\n \"type\":\"polygon\",\n \"coordinates\":[[[-20,-20],[20,-20],[20,20],[-20,20],[-20,-20]]]\n }\n}\n```\n\nIf a `geo_shape` filter is applied to the `location2` field\n\n```\n{\n \"geo_shape\": {\n \"location2\": {\n \"indexed_shape\": { \n \"id\": \"1\",\n \"type\": \"type1\",\n \"index\": \"test\",\n \"shape_field_name\": \"location2\"\n }\n }\n }\n}\n```\n\nparsing fails with\n\n```\nElasticSearchIllegalStateException[Shape with name [1] found but missing location2 field];\n```\n", "comments": [ { "body": "Is this fix included in the 0.90.3 release?\nI am currently experiencing the same error when both a geo_shape and a geo_point field are present in the same document.\n\n**Edit: using 0.90.3**\n", "created_at": "2013-09-07T19:25:30Z" }, { "body": "this has never been backported... I will backport!\n", "created_at": "2013-09-07T19:42:33Z" }, { "body": "pushed to `0.90` branch. This will be part of `0.90.4`\n", "created_at": "2013-09-07T20:16:56Z" }, { "body": "When is the, approximate, anticipated release of 0.90.4?\n", "created_at": "2013-09-08T00:04:22Z" }, { "body": "> When is the, approximate, anticipated release of 0.90.4?\n\nwe plan on a release early next week \n", "created_at": "2013-09-08T05:52:21Z" } ], "number": 3242, "title": "Geoshape filter can't handle multiple shapes" }
{ "body": "Fix parsing the field names in the `ShapeFetchService` \n\ncloses #3242\n", "number": 3243, "review_comments": [], "title": "Fix ShapeFetchService" }
{ "commits": [ { "message": "fixed ShapeFetchService. closes #3242" } ], "files": [ { "diff": "@@ -73,6 +73,7 @@ public Shape fetch(String id, String type, String index, String shapeField) thro\n parser.nextToken();\n return GeoJSONShapeParser.parse(parser);\n } else {\n+ parser.nextToken();\n parser.skipChildren();\n }\n }", "filename": "src/main/java/org/elasticsearch/index/search/shape/ShapeFetchService.java", "status": "modified" }, { "diff": "@@ -25,10 +25,12 @@\n import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;\n import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery;\n import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;\n+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;\n import static org.hamcrest.MatcherAssert.assertThat;\n import static org.hamcrest.Matchers.equalTo;\n import static org.hamcrest.Matchers.instanceOf;\n \n+import java.io.IOException;\n import java.util.List;\n import java.util.Map;\n \n@@ -182,6 +184,44 @@ public void testIndexedShapeReference() throws Exception {\n assertThat(searchResponse.getHits().getAt(0).id(), equalTo(\"1\"));\n }\n \n+ @Test\n+ public void testParsingMultipleShapes() throws IOException {\n+ String mapping = XContentFactory.jsonBuilder()\n+ .startObject()\n+ .startObject(\"type1\")\n+ .startObject(\"properties\")\n+ .startObject(\"location1\")\n+ .field(\"type\", \"geo_shape\")\n+ .endObject()\n+ .startObject(\"location2\")\n+ .field(\"type\", \"geo_shape\")\n+ .endObject()\n+ .endObject()\n+ .endObject()\n+ .endObject()\n+ .string();\n+ \n+ prepareCreate(\"test\").addMapping(\"type1\", mapping).execute().actionGet();\n+ ensureYellow();\n+\n+ String p1 = \"\\\"location1\\\" : {\\\"type\\\":\\\"polygon\\\", \\\"coordinates\\\":[[[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]]]}\";\n+ String p2 = \"\\\"location2\\\" : {\\\"type\\\":\\\"polygon\\\", \\\"coordinates\\\":[[[-20,-20],[20,-20],[20,20],[-20,20],[-20,-20]]]}\";\n+ String o1 = \"{\" + p1 + \", \" + p2 + \"}\";\n+\n+ client().prepareIndex(\"test\", \"type1\", \"1\").setSource(o1).execute().actionGet();\n+ client().admin().indices().prepareRefresh(\"test\").execute().actionGet();\n+\n+ String filter = \"{\\\"geo_shape\\\": {\\\"location2\\\": {\\\"indexed_shape\\\": {\" \n+ + \"\\\"id\\\": \\\"1\\\",\"\n+ + \"\\\"type\\\": \\\"type1\\\",\"\n+ + \"\\\"index\\\": \\\"test\\\",\"\n+ + \"\\\"shape_field_name\\\": \\\"location2\\\"\"\n+ + \"}}}}\";\n+\n+ SearchResponse result = client().prepareSearch(\"test\").setQuery(QueryBuilders.matchAllQuery()).setFilter(filter).execute().actionGet();\n+ assertHitCount(result, 1);\n+ }\n+ \n @Test // Issue 2944\n public void testThatShapeIsReturnedEvenWhenExclusionsAreSet() throws Exception {\n String mapping = XContentFactory.jsonBuilder().startObject().startObject(\"type1\")", "filename": "src/test/java/org/elasticsearch/test/integration/search/geo/GeoShapeIntegrationTests.java", "status": "modified" } ] }
{ "body": "The format of the geohash filter does not match the format other filters in the REST API. The fieldnames within the filter should match the fieldnames within the mapping.\nAlso the it should be possible to define the geohash_cell by a point in lat/lon format. Such a point can than transformed into a geohash.\nIn some corner cases the geohash_cell filter fails. i.e. the southern neighbor of cell `12b`.\n", "comments": [], "number": 3229, "title": "Geohash filter format" }
{ "body": "The `geohash_cell` filter now adapts the format of other geo-filters. The oject fieldnames match the fieldnames document names automatically. This invalidates the `field` field in previeous versions. The value these fields value is a `geo_point` value (all formats supported) which is internally translated to a geohash. Since those points alway have a maximum precision (level 12) a `precision` definition has been included. This precision can either be defined as _length_ of the geohash-string or as _distance_. It's assumed the a distance without any unit is a geohash-length.\n\n```\nGET 'http://127.0.0.1:9200/locations/_search?pretty=true' -d '{\n \"query\": {\n \"match_all\":{}\n },\n \"filter\": {\n \"geohash_cell\": {\n \"pin\": {\n \"lat\": 13.4080,\n \"lon\": 52.5186\n },\n \"precision\": 3,\n \"neighbors\": true\n }\n }\n}'\n```\n\nCloses #3229\n", "number": 3230, "review_comments": [ { "body": "doesn't this need to be set somewhere? i.e. back to the to the geohash. \n\nAlso, I think that the GeoPoint itself needs to be used as a varialbe and not the geohash, since generating the geohash will be based on the precision specified? For example, someone might define a very high precision, and then the geohash method here uses the default precision which might not be good enough...\n\nI think that we can simply call GeoPoint.parse(parsers) without the check on VALUE_STRING, and then use the GeoPoint to generate the has based on the precision later outside of the parsing loop.\n", "created_at": "2013-06-24T22:20:21Z" } ], "title": "Geohash filter enhancement" }
{ "commits": [ { "message": "The `geohash_cell` filter now adapts the format of other geo-filters. The oject fieldnames match the fieldnames document names automatically. This invalidates the `field` field in previeous versions. The value these fields value is a `geo_point` value (all formats supported) which is internally translated to a geohash. Since those points alway have a maximum precision (level 12) a `precision` definition has been included. This precision can either be defined as *length* of the geohash-string or as *distance*. It's assumed the a distance without any unit is a geohash-length.\n\n```\nGET 'http://127.0.0.1:9200/locations/_search?pretty=true' -d '{\n \"query\": {\n \"match_all\":{}\n },\n \"filter\": {\n \"geohash_cell\": {\n\t\t\t\"pin\": {\n\t\t\t\t\"lat\": 13.4080,\n\t\t\t\t\"lon\": 52.5186\n\t\t\t},\n \"precision\": 3,\n \"neighbors\": true\n }\n }\n}'\n```\nCloses #3229" } ], "files": [ { "diff": "@@ -170,7 +170,12 @@ private final static String neighbor(String geohash, int level, int dx, int dy)\n if (nx >= 0 && nx <= xLimit && ny >= 0 && ny < yLimit) {\n return geohash.substring(0, level - 1) + encode(nx, ny);\n } else {\n- return neighbor(geohash, level - 1, dx, dy) + encode(nx, ny);\n+ String neighbor = neighbor(geohash, level - 1, dx, dy);\n+ if(neighbor != null) {\n+ return neighbor + encode(nx, ny); \n+ } else {\n+ return null;\n+ }\n }\n }\n }", "filename": "src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java", "status": "modified" }, { "diff": "@@ -19,11 +19,21 @@\n \n package org.elasticsearch.common.geo;\n \n+import java.io.IOException;\n+\n+import org.elasticsearch.ElasticSearchParseException;\n+import org.elasticsearch.common.xcontent.XContentParser;\n+import org.elasticsearch.common.xcontent.XContentParser.Token;\n+import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;\n+\n /**\n *\n */\n public class GeoPoint {\n \n+ public static final String LATITUDE = GeoPointFieldMapper.Names.LAT;\n+ public static final String LONGITUDE = GeoPointFieldMapper.Names.LON;\n+ \n private double lat;\n private double lon;\n \n@@ -123,4 +133,92 @@ public int hashCode() {\n public String toString() {\n return \"[\" + lat + \", \" + lon + \"]\";\n }\n+ \n+ /**\n+ * Parse a {@link GeoPoint} with a {@link XContentParser}:\n+ * \n+ * @param parser {@link XContentParser} to parse the value from\n+ * @return new {@link GeoPoint} parsed from the parse\n+ * \n+ * @throws IOException\n+ * @throws ElasticSearchParseException\n+ */\n+ public static GeoPoint parse(XContentParser parser) throws IOException, ElasticSearchParseException {\n+ return parse(parser, new GeoPoint());\n+ }\n+\n+ /**\n+ * Parse a {@link GeoPoint} with a {@link XContentParser}. 
A geopoint has one of the following forms:\n+ * \n+ * <ul>\n+ * <li>Object: <pre>{&quot;lat&quot;: <i>&lt;latitude&gt;</i>, &quot;lon&quot;: <i>&lt;longitude&gt;</i>}</pre></li>\n+ * <li>String: <pre>&quot;<i>&lt;latitude&gt;</i>,<i>&lt;longitude&gt;</i>&quot;</pre></li>\n+ * <li>Geohash: <pre>&quot;<i>&lt;geohash&gt;</i>&quot;</pre></li>\n+ * <li>Array: <pre>[<i>&lt;longitude&gt;</i>,<i>&lt;latitude&gt;</i>]</pre></li>\n+ * </ul>\n+ * \n+ * @param parser {@link XContentParser} to parse the value from\n+ * @param point A {@link GeoPoint} that will be reset by the values parsed\n+ * @return new {@link GeoPoint} parsed from the parse\n+ * \n+ * @throws IOException\n+ * @throws ElasticSearchParseException\n+ */\n+ public static GeoPoint parse(XContentParser parser, GeoPoint point) throws IOException, ElasticSearchParseException {\n+ if(parser.currentToken() == Token.START_OBJECT) {\n+ while(parser.nextToken() != Token.END_OBJECT) {\n+ if(parser.currentToken() == Token.FIELD_NAME) {\n+ String field = parser.text();\n+ if(LATITUDE.equals(field)) {\n+ if(parser.nextToken() == Token.VALUE_NUMBER) {\n+ point.resetLat(parser.doubleValue());\n+ } else {\n+ throw new ElasticSearchParseException(\"latitude must be a number\");\n+ }\n+ } else if (LONGITUDE.equals(field)) {\n+ if(parser.nextToken() == Token.VALUE_NUMBER) {\n+ point.resetLon(parser.doubleValue());\n+ } else {\n+ throw new ElasticSearchParseException(\"latitude must be a number\");\n+ }\n+ } else {\n+ throw new ElasticSearchParseException(\"field must be either '\"+LATITUDE+\"' or '\"+LONGITUDE+\"'\");\n+ }\n+ } else {\n+ throw new ElasticSearchParseException(\"Token '\"+parser.currentToken()+\"' not allowed\");\n+ }\n+ }\n+ return point;\n+ } else if(parser.currentToken() == Token.START_ARRAY) {\n+ int element = 0;\n+ while(parser.nextToken() != Token.END_ARRAY) {\n+ if(parser.currentToken() == Token.VALUE_NUMBER) {\n+ element++;\n+ if(element == 1) {\n+ point.resetLon(parser.doubleValue());\n+ } else if(element == 2) {\n+ point.resetLat(parser.doubleValue());\n+ } else {\n+ throw new ElasticSearchParseException(\"only two values allowed\");\n+ }\n+ } else {\n+ throw new ElasticSearchParseException(\"Numeric value expected\");\n+ }\n+ }\n+ return point;\n+ } else if(parser.currentToken() == Token.VALUE_STRING) {\n+ String data = parser.text();\n+ int comma = data.indexOf(',');\n+ if(comma > 0) {\n+ double lat = Double.parseDouble(data.substring(0, comma).trim());\n+ double lon = Double.parseDouble(data.substring(comma+1).trim());\n+ return point.reset(lat, lon);\n+ } else {\n+ point.resetFromGeoHash(data);\n+ return point;\n+ }\n+ } else {\n+ throw new ElasticSearchParseException(\"geo_point expected\");\n+ }\n+ }\n }", "filename": "src/main/java/org/elasticsearch/common/geo/GeoPoint.java", "status": "modified" }, { "diff": "@@ -21,6 +21,7 @@\n \n import com.spatial4j.core.shape.Shape;\n import org.elasticsearch.common.Nullable;\n+import org.elasticsearch.common.geo.GeoPoint;\n import org.elasticsearch.common.geo.ShapeRelation;\n \n /**\n@@ -372,6 +373,18 @@ public static GeohashFilter.Builder geoHashFilter(String fieldname, String geoha\n return new GeohashFilter.Builder(fieldname, geohash);\n }\n \n+ /**\n+ * A filter based on a bounding box defined by geohash. 
The field this filter is applied to\n+ * must have <code>{&quot;type&quot;:&quot;geo_point&quot;, &quot;geohash&quot;:true}</code>\n+ * to work.\n+ *\n+ * @param fieldname The geopoint field name.\n+ * @param point a geopoint within the geohash bucket\n+ */\n+ public static GeohashFilter.Builder geoHashFilter(String fieldname, GeoPoint point) {\n+ return new GeohashFilter.Builder(fieldname, point);\n+ }\n+\n /**\n * A filter based on a bounding box defined by geohash. The field this filter is applied to\n * must have <code>{&quot;type&quot;:&quot;geo_point&quot;, &quot;geohash&quot;:true}</code>", "filename": "src/main/java/org/elasticsearch/index/query/FilterBuilders.java", "status": "modified" }, { "diff": "@@ -26,7 +26,9 @@\n import org.elasticsearch.common.Strings;\n import org.elasticsearch.common.geo.GeoHashUtils;\n import org.elasticsearch.common.geo.GeoPoint;\n+import org.elasticsearch.common.geo.GeoUtils;\n import org.elasticsearch.common.inject.Inject;\n+import org.elasticsearch.common.unit.DistanceUnit;\n import org.elasticsearch.common.xcontent.XContentBuilder;\n import org.elasticsearch.common.xcontent.XContentParser;\n import org.elasticsearch.common.xcontent.XContentParser.Token;\n@@ -56,9 +58,8 @@\n public class GeohashFilter {\n \n public static final String NAME = \"geohash_cell\";\n- public static final String FIELDNAME = \"field\";\n- public static final String GEOHASH = \"geohash\";\n public static final String NEIGHBORS = \"neighbors\";\n+ public static final String PRECISION = \"precision\";\n \n /**\n * Create a new geohash filter for a given set of geohashes. In general this method\n@@ -90,15 +91,23 @@ public static Filter create(QueryParseContext context, GeoPointFieldMapper field\n * <code>false</code>.\n */\n public static class Builder extends BaseFilterBuilder {\n-\n+ // we need to store the geohash rather than the corresponding point,\n+ // because a transformation from a geohash to a point an back to the\n+ // geohash will extend the accuracy of the hash to max precision\n+ // i.e. 
by filing up with z's.\n private String fieldname;\n private String geohash;\n+ private int levels = -1;\n private boolean neighbors;\n \n public Builder(String fieldname) {\n this(fieldname, null, false);\n }\n \n+ public Builder(String fieldname, GeoPoint point) {\n+ this(fieldname, point.geohash(), false);\n+ }\n+\n public Builder(String fieldname, String geohash) {\n this(fieldname, geohash, false);\n }\n@@ -110,11 +119,31 @@ public Builder(String fieldname, String geohash, boolean neighbors) {\n this.neighbors = neighbors;\n }\n \n+ public Builder setPoint(GeoPoint point) {\n+ this.geohash = point.getGeohash();\n+ return this;\n+ }\n+\n+ public Builder setPoint(double lat, double lon) {\n+ this.geohash = GeoHashUtils.encode(lat, lon);\n+ return this;\n+ }\n+\n public Builder setGeohash(String geohash) {\n this.geohash = geohash;\n return this;\n }\n \n+ public Builder setPrecision(int levels) {\n+ this.levels = levels;\n+ return this;\n+ }\n+\n+ public Builder setPrecision(String precision) {\n+ double meters = DistanceUnit.parse(precision, DistanceUnit.METERS, DistanceUnit.METERS);\n+ return setPrecision(GeoUtils.geoHashLevelsForPrecision(meters));\n+ }\n+ \n public Builder setNeighbors(boolean neighbors) {\n this.neighbors = neighbors;\n return this;\n@@ -128,11 +157,14 @@ public Builder setField(String fieldname) {\n @Override\n protected void doXContent(XContentBuilder builder, Params params) throws IOException {\n builder.startObject(NAME);\n- builder.field(FIELDNAME, fieldname);\n- builder.field(GEOHASH, geohash);\n if (neighbors) {\n builder.field(NEIGHBORS, neighbors);\n }\n+ if(levels > 0) {\n+ builder.field(PRECISION, levels);\n+ }\n+ builder.field(fieldname, geohash);\n+\n builder.endObject();\n }\n }\n@@ -154,28 +186,43 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar\n \n String fieldName = null;\n String geohash = null;\n+ int levels = -1;\n boolean neighbors = false;\n \n XContentParser.Token token;\n if ((token = parser.currentToken()) != Token.START_OBJECT) {\n throw new ElasticSearchParseException(NAME + \" must be an object\");\n }\n \n- while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {\n+ while ((token = parser.nextToken()) != Token.END_OBJECT) {\n if (token == Token.FIELD_NAME) {\n String field = parser.text();\n \n- if (FIELDNAME.equals(field)) {\n- parser.nextToken();\n- fieldName = parser.text();\n- } else if (GEOHASH.equals(field)) {\n- parser.nextToken();\n- geohash = parser.text();\n+ if (PRECISION.equals(field)) {\n+ token = parser.nextToken();\n+ if(token == Token.VALUE_NUMBER) {\n+ levels = parser.intValue();\n+ } else if(token == Token.VALUE_STRING) {\n+ double meters = DistanceUnit.parse(parser.text(), DistanceUnit.METERS, DistanceUnit.METERS);\n+ levels = GeoUtils.geoHashLevelsForPrecision(meters);\n+ }\n } else if (NEIGHBORS.equals(field)) {\n parser.nextToken();\n neighbors = parser.booleanValue();\n } else {\n- throw new ElasticSearchParseException(\"unexpected field [\" + field + \"]\");\n+ fieldName = field;\n+ token = parser.nextToken();\n+ if(token == Token.VALUE_STRING) {\n+ // A string indicates either a gehash or a lat/lon string\n+ String location = parser.text();\n+ if(location.indexOf(\",\")>0) {\n+ geohash = GeoPoint.parse(parser).geohash();\n+ } else {\n+ geohash = location;\n+ }\n+ } else {\n+ geohash = GeoPoint.parse(parser).geohash();\n+ }\n }\n } else {\n throw new ElasticSearchParseException(\"unexpected token [\" + token + \"]\");\n@@ -194,6 +241,11 @@ public Filter 
parse(QueryParseContext parseContext) throws IOException, QueryPar\n \n GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();\n \n+ if(levels > 0) {\n+ int len = Math.min(levels, geohash.length());\n+ geohash = geohash.substring(0, len);\n+ }\n+\n if (neighbors) {\n return create(parseContext, geoMapper, geohash, GeoHashUtils.neighbors(geohash));\n } else {", "filename": "src/main/java/org/elasticsearch/index/query/GeohashFilter.java", "status": "modified" }, { "diff": "@@ -441,11 +441,14 @@ public void bulktest() throws Exception {\n \n @Test\n public void testGeoHashFilter() throws IOException {\n- String geohash = randomhash(12);\n- List<String> neighbors = GeoHashUtils.neighbors(geohash);\n-\n+ String geohash = randomhash(10);\n logger.info(\"Testing geohash boundingbox filter for [{}]\", geohash);\n+\n+ List<String> neighbors = GeoHashUtils.neighbors(geohash);\n+ List<String> parentNeighbors = GeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));\n+ \n logger.info(\"Neighbors {}\", neighbors);\n+ logger.info(\"Parent Neighbors {}\", parentNeighbors);\n \n String mapping = XContentFactory.jsonBuilder()\n .startObject()\n@@ -477,20 +480,38 @@ public void testGeoHashFilter() throws IOException {\n client().prepareIndex(\"locations\", \"location\", \"p\").setCreate(true).setSource(\"{\\\"pin\\\":\\\"\" + geohash.substring(0, geohash.length() - 1) + \"\\\"}\").execute().actionGet();\n \n // index neighbors\n- List<String> parentNeighbors = GeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));\n for (int i = 0; i < parentNeighbors.size(); i++) {\n client().prepareIndex(\"locations\", \"location\", \"p\" + i).setCreate(true).setSource(\"{\\\"pin\\\":\\\"\" + parentNeighbors.get(i) + \"\\\"}\").execute().actionGet();\n }\n \n client().admin().indices().prepareRefresh(\"locations\").execute().actionGet();\n \n- // Result of this geohash search should contain the geohash only \n- SearchResponse results1 = client().prepareSearch(\"locations\").setQuery(QueryBuilders.matchAllQuery()).setFilter(\"{\\\"geohash_cell\\\": {\\\"field\\\": \\\"pin\\\", \\\"geohash\\\": \\\"\" + geohash + \"\\\", \\\"neighbors\\\": false}}\").execute().actionGet();\n+ // Result of this geohash search should contain the geohash only\n+ SearchResponse results1 = client().prepareSearch(\"locations\").setQuery(QueryBuilders.matchAllQuery()).setFilter(\"{\\\"geohash_cell\\\": {\\\"pin\\\": \\\"\" + geohash + \"\\\", \\\"neighbors\\\": false}}\").execute().actionGet();\n assertHitCount(results1, 1);\n \n- SearchResponse results2 = client().prepareSearch(\"locations\").setQuery(QueryBuilders.matchAllQuery()).setFilter(\"{\\\"geohash_cell\\\": {\\\"field\\\": \\\"pin\\\", \\\"geohash\\\": \\\"\" + geohash.substring(0, geohash.length() - 1) + \"\\\", \\\"neighbors\\\": true}}\").execute().actionGet();\n // Result of the parent query should contain the parent it self, its neighbors, the child and all its neighbors\n+ SearchResponse results2 = client().prepareSearch(\"locations\").setQuery(QueryBuilders.matchAllQuery()).setFilter(\"{\\\"geohash_cell\\\": {\\\"pin\\\": \\\"\" + geohash.substring(0, geohash.length() - 1) + \"\\\", \\\"neighbors\\\": true}}\").execute().actionGet();\n assertHitCount(results2, 2 + neighbors.size() + parentNeighbors.size());\n+\n+ // Testing point formats and precision\n+ GeoPoint point = GeoHashUtils.decode(geohash);\n+ int precision = geohash.length();\n+\n+ logger.info(\"Testing lat/lon format\");\n+ String pointTest1 = 
\"{\\\"geohash_cell\\\": {\\\"pin\\\": {\\\"lat\\\": \" + point.lat() + \",\\\"lon\\\": \" + point.lon() + \"},\\\"precision\\\": \" + precision + \",\\\"neighbors\\\": true}}\";\n+ SearchResponse results3 = client().prepareSearch(\"locations\").setQuery(QueryBuilders.matchAllQuery()).setFilter(pointTest1).execute().actionGet();\n+ assertHitCount(results3, neighbors.size() + 1);\n+\n+ logger.info(\"Testing String format\");\n+ String pointTest2 = \"{\\\"geohash_cell\\\": {\\\"pin\\\": \\\"\" + point.lat() + \",\" + point.lon() + \"\\\",\\\"precision\\\": \" + precision + \",\\\"neighbors\\\": true}}\";\n+ SearchResponse results4 = client().prepareSearch(\"locations\").setQuery(QueryBuilders.matchAllQuery()).setFilter(pointTest2).execute().actionGet();\n+ assertHitCount(results4, neighbors.size() + 1);\n+\n+ logger.info(\"Testing Array format\");\n+ String pointTest3 = \"{\\\"geohash_cell\\\": {\\\"pin\\\": [\" + point.lon() + \",\" + point.lat() + \"],\\\"precision\\\": \" + precision + \",\\\"neighbors\\\": true}}\";\n+ SearchResponse results5 = client().prepareSearch(\"locations\").setQuery(QueryBuilders.matchAllQuery()).setFilter(pointTest3).execute().actionGet();\n+ assertHitCount(results5, neighbors.size() + 1);\n }\n \n @Test", "filename": "src/test/java/org/elasticsearch/test/integration/search/geo/GeoFilterTests.java", "status": "modified" } ] }
{ "body": "To reproduce\n\n``` sh\ncurl -XPOST http://localhost:9200/test/test/1 -d'{\n \"field\": \"value1\"\n}'\n```\n\nThe internal version is now 1.\n\nNow index again:\n\n``` sh\ncurl -XPOST http://localhost:9200/test/test/1 -d'{\n \"field\": \"value2\"\n}'\n```\n\nInternal version is now 2. \n\nTry to update using version=1 (should fail)\n\n``` sh\ncurl -XPOST \"http://localhost:9200/test/test/1/_update?version=1\" -d'{\n \"doc\": { \"field\": \"value3\" }\n}'\n```\n\nWhich doesn't fail with a version conflict but returns:\n\n``` json\n{\"ok\":true,\"_index\":\"test\",\"_type\":\"test\",\"_id\":\"1\",\"_version\":3,\"_previous_version\":2}\n```\n\nPS. The java api's UpdateRequestBuilder doesn't have a setVersion method\n", "comments": [], "number": 3111, "title": "Update api doesn't support versioning" }
{ "body": "Moved version handling from RobinEngine into VersionType. This avoids code re-use and makes it cleaner and easier to read.\n\nCloses #3111\n", "number": 3194, "review_comments": [], "title": "Added version support to update requests" }
{ "commits": [ { "message": "Added version support to update requests\n\nMoved version handling from RobinEngine into VersionType. This avoids code re-use and makes it cleaner and easier to read.\n\nCloses #3111" } ], "files": [ { "diff": "@@ -33,6 +33,7 @@\n import org.elasticsearch.common.bytes.BytesReference;\n import org.elasticsearch.common.io.stream.StreamInput;\n import org.elasticsearch.common.io.stream.StreamOutput;\n+import org.elasticsearch.common.lucene.uid.Versions;\n import org.elasticsearch.common.unit.TimeValue;\n import org.elasticsearch.common.xcontent.XContent;\n import org.elasticsearch.common.xcontent.XContentFactory;\n@@ -155,6 +156,7 @@ BulkRequest internalAdd(UpdateRequest request, @Nullable Object payload) {\n }\n return this;\n }\n+\n /**\n * Adds an {@link DeleteRequest} to the list of actions to execute.\n */\n@@ -272,7 +274,7 @@ public BulkRequest add(BytesReference data, boolean contentUnsafe, @Nullable Str\n String timestamp = null;\n Long ttl = null;\n String opType = null;\n- long version = 0;\n+ long version = Versions.MATCH_ANY;\n VersionType versionType = VersionType.INTERNAL;\n String percolate = null;\n int retryOnConflict = 0;\n@@ -345,6 +347,7 @@ public BulkRequest add(BytesReference data, boolean contentUnsafe, @Nullable Str\n .percolate(percolate), payload);\n } else if (\"update\".equals(action)) {\n internalAdd(new UpdateRequest(index, type, id).routing(routing).parent(parent).retryOnConflict(retryOnConflict)\n+ .version(version).versionType(versionType)\n .source(data.slice(from, nextMarker - from))\n .percolate(percolate), payload);\n }", "filename": "src/main/java/org/elasticsearch/action/bulk/BulkRequest.java", "status": "modified" }, { "diff": "@@ -37,6 +37,7 @@\n import org.elasticsearch.common.bytes.BytesReference;\n import org.elasticsearch.common.io.stream.StreamInput;\n import org.elasticsearch.common.io.stream.StreamOutput;\n+import org.elasticsearch.common.lucene.uid.Versions;\n import org.elasticsearch.common.xcontent.*;\n import org.elasticsearch.index.VersionType;\n import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;\n@@ -130,7 +131,7 @@ public static OpType fromId(byte id) {\n private OpType opType = OpType.INDEX;\n \n private boolean refresh = false;\n- private long version = 0;\n+ private long version = Versions.MATCH_ANY;\n private VersionType versionType = VersionType.INTERNAL;\n private String percolate;\n ", "filename": "src/main/java/org/elasticsearch/action/index/IndexRequest.java", "status": "modified" }, { "diff": "@@ -14,8 +14,10 @@\n import org.elasticsearch.common.unit.TimeValue;\n import org.elasticsearch.common.xcontent.XContentHelper;\n import org.elasticsearch.common.xcontent.XContentType;\n+import org.elasticsearch.index.VersionType;\n import org.elasticsearch.index.engine.DocumentMissingException;\n import org.elasticsearch.index.engine.DocumentSourceMissingException;\n+import org.elasticsearch.index.engine.VersionConflictEngineException;\n import org.elasticsearch.index.get.GetField;\n import org.elasticsearch.index.get.GetResult;\n import org.elasticsearch.index.mapper.internal.ParentFieldMapper;\n@@ -78,9 +80,23 @@ public Result prepare(UpdateRequest request, IndexShard indexShard) {\n .refresh(request.refresh())\n .replicationType(request.replicationType()).consistencyLevel(request.consistencyLevel());\n indexRequest.operationThreaded(false);\n+ if (request.versionType() == VersionType.EXTERNAL) {\n+ // in external versioning mode, we want to create the new document using the given 
version.\n+ indexRequest.version(request.version()).versionType(VersionType.EXTERNAL);\n+ }\n return new Result(indexRequest, Operation.UPSERT, null, null);\n }\n \n+ if (request.versionType().isVersionConflict(getResult.getVersion(), request.version())) {\n+ throw new VersionConflictEngineException(new ShardId(request.index(), request.shardId()), request.type(), request.id(),\n+ getResult.getVersion(), request.version());\n+ }\n+\n+ long updateVersion = getResult.getVersion();\n+ if (request.versionType() == VersionType.EXTERNAL) {\n+ updateVersion = request.version(); // remember, match_any is excluded by the conflict test\n+ }\n+\n if (getResult.internalSourceRef() == null) {\n // no source, we can't do nothing, through a failure...\n throw new DocumentSourceMissingException(new ShardId(request.index(), request.shardId()), request.type(), request.id());\n@@ -148,20 +164,20 @@ public Result prepare(UpdateRequest request, IndexShard indexShard) {\n }\n }\n \n- // TODO: external version type, does it make sense here? does not seem like it...\n- // TODO: because we use getResult.getVersion we loose the doc.version. The question is where is the right place?\n if (operation == null || \"index\".equals(operation)) {\n final IndexRequest indexRequest = Requests.indexRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent)\n .source(updatedSourceAsMap, updateSourceContentType)\n- .version(getResult.getVersion()).replicationType(request.replicationType()).consistencyLevel(request.consistencyLevel())\n+ .version(updateVersion).versionType(request.versionType())\n+ .replicationType(request.replicationType()).consistencyLevel(request.consistencyLevel())\n .timestamp(timestamp).ttl(ttl)\n .percolate(request.percolate())\n .refresh(request.refresh());\n indexRequest.operationThreaded(false);\n return new Result(indexRequest, Operation.INDEX, updatedSourceAsMap, updateSourceContentType);\n } else if (\"delete\".equals(operation)) {\n DeleteRequest deleteRequest = Requests.deleteRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent)\n- .version(getResult.getVersion()).replicationType(request.replicationType()).consistencyLevel(request.consistencyLevel());\n+ .version(updateVersion).versionType(request.versionType())\n+ .replicationType(request.replicationType()).consistencyLevel(request.consistencyLevel());\n deleteRequest.operationThreaded(false);\n return new Result(deleteRequest, Operation.DELETE, updatedSourceAsMap, updateSourceContentType);\n } else if (\"none\".equals(operation)) {", "filename": "src/main/java/org/elasticsearch/action/update/UpdateHelper.java", "status": "modified" }, { "diff": "@@ -31,10 +31,12 @@\n import org.elasticsearch.common.bytes.BytesReference;\n import org.elasticsearch.common.io.stream.StreamInput;\n import org.elasticsearch.common.io.stream.StreamOutput;\n+import org.elasticsearch.common.lucene.uid.Versions;\n import org.elasticsearch.common.xcontent.XContentBuilder;\n import org.elasticsearch.common.xcontent.XContentFactory;\n import org.elasticsearch.common.xcontent.XContentParser;\n import org.elasticsearch.common.xcontent.XContentType;\n+import org.elasticsearch.index.VersionType;\n \n import java.io.IOException;\n import java.util.Map;\n@@ -59,7 +61,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>\n \n private String[] fields;\n \n- int retryOnConflict = 0;\n+ private long version = Versions.MATCH_ANY;\n+ private VersionType versionType = 
VersionType.INTERNAL;\n+ private int retryOnConflict = 0;\n \n private String percolate;\n \n@@ -69,7 +73,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>\n private WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT;\n \n private IndexRequest upsertRequest;\n- \n+\n private boolean docAsUpsert = false;\n \n @Nullable\n@@ -94,14 +98,19 @@ public ActionRequestValidationException validate() {\n if (id == null) {\n validationException = addValidationError(\"id is missing\", validationException);\n }\n+\n+ if (version != Versions.MATCH_ANY && retryOnConflict > 0) {\n+ validationException = addValidationError(\"can't provide both retry_on_conflict and a specific version\", validationException);\n+ }\n+\n if (script == null && doc == null) {\n validationException = addValidationError(\"script or doc is missing\", validationException);\n }\n if (script != null && doc != null) {\n validationException = addValidationError(\"can't provide both script and doc\", validationException);\n }\n- if(doc == null && docAsUpsert){\n- \tvalidationException = addValidationError(\"can't say to upsert doc without providing doc\", validationException);\n+ if (doc == null && docAsUpsert) {\n+ validationException = addValidationError(\"can't say to upsert doc without providing doc\", validationException);\n }\n return validationException;\n }\n@@ -285,6 +294,31 @@ public int retryOnConflict() {\n return this.retryOnConflict;\n }\n \n+ /**\n+ * Sets the version, which will cause the index operation to only be performed if a matching\n+ * version exists and no changes happened on the doc since then.\n+ */\n+ public UpdateRequest version(long version) {\n+ this.version = version;\n+ return this;\n+ }\n+\n+ public long version() {\n+ return this.version;\n+ }\n+\n+ /**\n+ * Sets the versioning type. Defaults to {@link VersionType#INTERNAL}.\n+ */\n+ public UpdateRequest versionType(VersionType versionType) {\n+ this.versionType = versionType;\n+ return this;\n+ }\n+\n+ public VersionType versionType() {\n+ return this.versionType;\n+ }\n+\n /**\n * Causes the update request document to be percolated. The parameter is the percolate query\n * to use to reduce the percolated queries that are going to run against this doc. 
Can be\n@@ -396,6 +430,14 @@ public UpdateRequest doc(byte[] source, int offset, int length) {\n return this;\n }\n \n+ /**\n+ * Sets the doc to use for updates when a script is not specified.\n+ */\n+ public UpdateRequest doc(String field, Object value) {\n+ safeDoc().source(field, value);\n+ return this;\n+ }\n+\n public IndexRequest doc() {\n return this.doc;\n }\n@@ -513,8 +555,8 @@ public UpdateRequest source(BytesReference source) throws Exception {\n XContentBuilder docBuilder = XContentFactory.contentBuilder(xContentType);\n docBuilder.copyCurrentStructure(parser);\n safeDoc().source(docBuilder);\n- } else if(\"doc_as_upsert\".equals(currentFieldName)){\n- \tdocAsUpsert(parser.booleanValue());\n+ } else if (\"doc_as_upsert\".equals(currentFieldName)) {\n+ docAsUpsert(parser.booleanValue());\n }\n }\n } finally {\n@@ -526,9 +568,10 @@ public UpdateRequest source(BytesReference source) throws Exception {\n public boolean docAsUpsert() {\n return this.docAsUpsert;\n }\n+\n public void docAsUpsert(boolean shouldUpsertDoc) {\n this.docAsUpsert = shouldUpsertDoc;\n- if(this.doc != null && this.upsertRequest == null){\n+ if (this.doc != null && this.upsertRequest == null) {\n upsert(doc);\n }\n }\n@@ -565,6 +608,8 @@ public void readFrom(StreamInput in) throws IOException {\n if (in.getVersion().onOrAfter(Version.V_0_90_2)) {\n docAsUpsert = in.readBoolean();\n }\n+ version = in.readLong();\n+ versionType = VersionType.fromValue(in.readByte());\n }\n \n @Override\n@@ -612,6 +657,8 @@ public void writeTo(StreamOutput out) throws IOException {\n if (out.getVersion().onOrAfter(Version.V_0_90_2)) {\n out.writeBoolean(docAsUpsert);\n }\n+ out.writeLong(version);\n+ out.writeByte(versionType.getValue());\n }\n \n }", "filename": "src/main/java/org/elasticsearch/action/update/UpdateRequest.java", "status": "modified" }, { "diff": "@@ -29,6 +29,7 @@\n import org.elasticsearch.common.bytes.BytesReference;\n import org.elasticsearch.common.xcontent.XContentBuilder;\n import org.elasticsearch.common.xcontent.XContentType;\n+import org.elasticsearch.index.VersionType;\n \n import java.util.Map;\n \n@@ -124,6 +125,24 @@ public UpdateRequestBuilder setRetryOnConflict(int retryOnConflict) {\n return this;\n }\n \n+ /**\n+ * Sets the version, which will cause the index operation to only be performed if a matching\n+ * version exists and no changes happened on the doc since then.\n+ */\n+ public UpdateRequestBuilder setVersion(long version) {\n+ request.version(version);\n+ return this;\n+ }\n+\n+ /**\n+ * Sets the versioning type. Defaults to {@link org.elasticsearch.index.VersionType#INTERNAL}.\n+ */\n+ public UpdateRequestBuilder setVersionType(VersionType versionType) {\n+ request.versionType(versionType);\n+ return this;\n+ }\n+\n+\n /**\n * Should a refresh be executed post this update operation causing the operation to\n * be searchable. Note, heavy indexing should not set this to <tt>true</tt>. Defaults\n@@ -216,6 +235,14 @@ public UpdateRequestBuilder setDoc(byte[] source, int offset, int length) {\n return this;\n }\n \n+ /**\n+ * Sets the doc to use for updates when a script is not specified.\n+ */\n+ public UpdateRequestBuilder setDoc(String field, Object value) {\n+ request.doc(field, value);\n+ return this;\n+ }\n+\n /**\n * Sets the index request to be used if the document does not exists. 
Otherwise, a {@link org.elasticsearch.index.engine.DocumentMissingException}\n * is thrown.\n@@ -305,5 +332,4 @@ public UpdateRequestBuilder setDocAsUpsert(boolean shouldUpsertDoc) {\n protected void doExecute(ActionListener<UpdateResponse> listener) {\n ((Client) client).update(request, listener);\n }\n-\n }", "filename": "src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java", "status": "modified" }, { "diff": "@@ -19,13 +19,47 @@\n package org.elasticsearch.index;\n \n import org.elasticsearch.ElasticSearchIllegalArgumentException;\n+import org.elasticsearch.common.lucene.uid.Versions;\n \n /**\n *\n */\n public enum VersionType {\n- INTERNAL((byte) 0),\n- EXTERNAL((byte) 1);\n+ INTERNAL((byte) 0) {\n+ /**\n+ * - always returns false if currentVersion == {@link Versions#NOT_SET}\n+ * - always accepts expectedVersion == {@link Versions#MATCH_ANY}\n+ * - if expectedVersion is set, always conflict if currentVersion == {@link Versions#NOT_FOUND}\n+ */\n+ @Override\n+ public boolean isVersionConflict(long currentVersion, long expectedVersion) {\n+ return currentVersion != Versions.NOT_SET && expectedVersion != Versions.MATCH_ANY\n+ && (currentVersion == Versions.NOT_FOUND || currentVersion != expectedVersion);\n+ }\n+\n+ @Override\n+ public long updateVersion(long currentVersion, long expectedVersion) {\n+ return (currentVersion == Versions.NOT_SET || currentVersion == Versions.NOT_FOUND) ? 1 : currentVersion + 1;\n+ }\n+\n+ },\n+ EXTERNAL((byte) 1) {\n+ /**\n+ * - always returns false if currentVersion == {@link Versions#NOT_SET}\n+ * - always conflict if expectedVersion == {@link Versions#MATCH_ANY} (we need something to set)\n+ * - accepts currentVersion == {@link Versions#NOT_FOUND}\n+ */\n+ @Override\n+ public boolean isVersionConflict(long currentVersion, long expectedVersion) {\n+ return currentVersion != Versions.NOT_SET && currentVersion != Versions.NOT_FOUND\n+ && (expectedVersion == Versions.MATCH_ANY || currentVersion >= expectedVersion);\n+ }\n+\n+ @Override\n+ public long updateVersion(long currentVersion, long expectedVersion) {\n+ return expectedVersion;\n+ }\n+ };\n \n private final byte value;\n \n@@ -37,6 +71,20 @@ public byte getValue() {\n return value;\n }\n \n+ /**\n+ * Checks whether the current version conflicts with the expected version, based on the current version type.\n+ *\n+ * @return true if versions conflict false o.w.\n+ */\n+ public abstract boolean isVersionConflict(long currentVersion, long expectedVersion);\n+\n+ /**\n+ * Returns the new version for a document, based on it's current one and the specified in the request\n+ *\n+ * @return new version\n+ */\n+ public abstract long updateVersion(long currentVersion, long expectedVersion);\n+\n public static VersionType fromString(String versionType) {\n if (\"internal\".equals(versionType)) {\n return INTERNAL;", "filename": "src/main/java/org/elasticsearch/index/VersionType.java", "status": "modified" }, { "diff": "@@ -394,49 +394,23 @@ private void innerCreate(Create create, IndexWriter writer) throws IOException {\n \n // same logic as index\n long updatedVersion;\n+ long expectedVersion = create.version();\n if (create.origin() == Operation.Origin.PRIMARY) {\n- if (create.versionType() == VersionType.INTERNAL) { // internal version type\n- long expectedVersion = create.version();\n- if (expectedVersion != Versions.MATCH_ANY && currentVersion != Versions.NOT_SET) {\n- // an explicit version is provided, see if there is a conflict\n- // if we did not find anything, and a version is 
provided, so we do expect to find a doc under that version\n- // this is important, since we don't allow to preset a version in order to handle deletes\n- if (currentVersion == Versions.NOT_FOUND) {\n- throw new VersionConflictEngineException(shardId, create.type(), create.id(), Versions.NOT_FOUND, expectedVersion);\n- } else if (expectedVersion != currentVersion) {\n- throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);\n- }\n-\n- }\n- updatedVersion = (currentVersion == Versions.NOT_SET || currentVersion == Versions.NOT_FOUND) ? 1 : currentVersion + 1;\n- } else { // external version type\n- // an external version is provided, just check, if a local version exists, that its higher than it\n- // the actual version checking is one in an external system, and we just want to not index older versions\n- if (currentVersion >= 0) { // we can check!, its there\n- if (currentVersion >= create.version()) {\n- throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, create.version());\n- }\n- }\n- updatedVersion = create.version();\n+ if (create.versionType().isVersionConflict(currentVersion, expectedVersion)) {\n+ throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);\n }\n+ updatedVersion = create.versionType().updateVersion(currentVersion, expectedVersion);\n } else { // if (index.origin() == Operation.Origin.REPLICA || index.origin() == Operation.Origin.RECOVERY) {\n- long expectedVersion = create.version();\n- if (currentVersion != Versions.NOT_SET) { // we don't have a version, so ignore...\n- // if it does not exists, and its considered the first index operation (replicas/recovery are 1 of)\n- // then nothing to check\n- if (!(currentVersion == Versions.NOT_FOUND && create.version() == 1)) {\n- // with replicas/recovery, we only check for previous version, we allow to set a future version\n- if (expectedVersion <= currentVersion) {\n- if (create.origin() == Operation.Origin.RECOVERY) {\n- return;\n- } else {\n- throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);\n- }\n- }\n+ // replicas treat the version as \"external\" as it comes from the primary ->\n+ // only exploding if the version they got is lower or equal to what they know.\n+ if (VersionType.EXTERNAL.isVersionConflict(currentVersion, expectedVersion)) {\n+ if (create.origin() == Operation.Origin.RECOVERY) {\n+ return;\n+ } else {\n+ throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);\n }\n }\n- // replicas already hold the \"future\" version\n- updatedVersion = create.version();\n+ updatedVersion = VersionType.EXTERNAL.updateVersion(currentVersion, expectedVersion);\n }\n \n // if the doc does not exists or it exists but not delete\n@@ -516,49 +490,25 @@ private void innerIndex(Index index, IndexWriter writer) throws IOException {\n }\n \n long updatedVersion;\n+ long expectedVersion = index.version();\n if (index.origin() == Operation.Origin.PRIMARY) {\n- if (index.versionType() == VersionType.INTERNAL) { // internal version type\n- long expectedVersion = index.version();\n- if (expectedVersion != Versions.MATCH_ANY && currentVersion != Versions.NOT_SET) {\n- // an explicit version is provided, see if there is a conflict\n- // if we did not find anything, and a version is provided, so we do expect to find a doc under that version\n- // this is important, since we 
don't allow to preset a version in order to handle deletes\n- if (currentVersion == Versions.NOT_FOUND) {\n- throw new VersionConflictEngineException(shardId, index.type(), index.id(), Versions.NOT_FOUND, expectedVersion);\n- } else if (expectedVersion != currentVersion) {\n- throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);\n- }\n- }\n- updatedVersion = (currentVersion == Versions.NOT_SET || currentVersion == Versions.NOT_FOUND) ? 1 : currentVersion + 1;\n-\n- } else { // external version type\n- // an external version is provided, just check, if a local version exists, that its higher than it\n- // the actual version checking is one in an external system, and we just want to not index older versions\n- if (currentVersion >= 0) { // we can check!, its there\n- if (currentVersion >= index.version()) {\n- throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, index.version());\n- }\n- }\n- updatedVersion = index.version();\n+ if (index.versionType().isVersionConflict(currentVersion, expectedVersion)) {\n+ throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);\n }\n+\n+ updatedVersion = index.versionType().updateVersion(currentVersion, expectedVersion);\n+\n } else { // if (index.origin() == Operation.Origin.REPLICA || index.origin() == Operation.Origin.RECOVERY) {\n- long expectedVersion = index.version();\n- if (currentVersion != Versions.NOT_SET) { // we don't have a version, so ignore...\n- // if it does not exists, and its considered the first index operation (replicas/recovery are 1 of)\n- // then nothing to check\n- if (!(currentVersion == Versions.NOT_FOUND && index.version() == 1)) {\n- // with replicas/recovery, we only check for previous version, we allow to set a future version\n- if (expectedVersion <= currentVersion) {\n- if (index.origin() == Operation.Origin.RECOVERY) {\n- return;\n- } else {\n- throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);\n- }\n- }\n+ // replicas treat the version as \"external\" as it comes from the primary ->\n+ // only exploding if the version they got is lower or equal to what they know.\n+ if (VersionType.EXTERNAL.isVersionConflict(currentVersion, expectedVersion)) {\n+ if (index.origin() == Operation.Origin.RECOVERY) {\n+ return;\n+ } else {\n+ throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);\n }\n }\n- // replicas already hold the \"future\" version\n- updatedVersion = index.version();\n+ updatedVersion = VersionType.EXTERNAL.updateVersion(currentVersion, expectedVersion);\n }\n \n index.version(updatedVersion);\n@@ -571,7 +521,9 @@ private void innerIndex(Index index, IndexWriter writer) throws IOException {\n writer.addDocument(index.docs().get(0), index.analyzer());\n }\n } else {\n- if (versionValue != null) index.created(versionValue.delete()); // we have a delete which is not GC'ed...\n+ if (versionValue != null) {\n+ index.created(versionValue.delete()); // we have a delete which is not GC'ed...\n+ }\n if (index.docs().size() > 1) {\n writer.updateDocuments(index.uid(), index.docs(), index.analyzer());\n } else {\n@@ -629,45 +581,25 @@ private void innerDelete(Delete delete, IndexWriter writer) throws IOException {\n }\n \n long updatedVersion;\n+ long expectedVersion = delete.version();\n if (delete.origin() == Operation.Origin.PRIMARY) {\n- if (delete.versionType() == 
VersionType.INTERNAL) { // internal version type\n- if (delete.version() != Versions.MATCH_ANY && currentVersion != Versions.NOT_SET) { // we don't have a version, so ignore...\n- // an explicit version is provided, see if there is a conflict\n- // if we did not find anything and a version is provided, so we do expect to find a doc under that version\n- if (currentVersion == Versions.NOT_FOUND) {\n- throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), Versions.NOT_FOUND, delete.version());\n- } else if (delete.version() != currentVersion) {\n- throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, delete.version());\n- }\n- }\n- updatedVersion = (currentVersion == Versions.NOT_SET || currentVersion == Versions.NOT_FOUND) ? 1 : currentVersion + 1;\n-\n- } else { // External\n- if (currentVersion == Versions.NOT_FOUND) {\n- // its an external version, that's fine, we allow it to be set\n- //throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), UidField.DocIdAndVersion.Versions.NOT_FOUND, delete.version());\n- } else if (currentVersion >= delete.version()) {\n- throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, delete.version());\n- }\n- updatedVersion = delete.version();\n+ if (delete.versionType().isVersionConflict(currentVersion, expectedVersion)) {\n+ throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, expectedVersion);\n }\n+\n+ updatedVersion = delete.versionType().updateVersion(currentVersion, expectedVersion);\n+\n } else { // if (index.origin() == Operation.Origin.REPLICA || index.origin() == Operation.Origin.RECOVERY) {\n- // on replica, the version is the future value expected (returned from the operation on the primary)\n- if (currentVersion != Versions.NOT_SET) { // we don't have a version in the index, ignore\n- // only check if we have a version for it, otherwise, ignore (see later)\n- if (currentVersion != Versions.NOT_FOUND) {\n- // with replicas, we only check for previous version, we allow to set a future version\n- if (delete.version() <= currentVersion) {\n- if (delete.origin() == Operation.Origin.RECOVERY) {\n- return;\n- } else {\n- throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion - 1, delete.version());\n- }\n- }\n+ // replicas treat the version as \"external\" as it comes from the primary ->\n+ // only exploding if the version they got is lower or equal to what they know.\n+ if (VersionType.EXTERNAL.isVersionConflict(currentVersion, expectedVersion)) {\n+ if (delete.origin() == Operation.Origin.RECOVERY) {\n+ return;\n+ } else {\n+ throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion - 1, expectedVersion);\n }\n }\n- // replicas already hold the \"future\" version\n- updatedVersion = delete.version();\n+ updatedVersion = VersionType.EXTERNAL.updateVersion(currentVersion, expectedVersion);\n }\n \n if (currentVersion == Versions.NOT_FOUND) {\n@@ -1056,7 +988,9 @@ public <T> T snapshot(SnapshotHandler<T> snapshotHandler) throws EngineException\n snapshotIndexCommit = deletionPolicy.snapshot();\n traslogSnapshot = translog.snapshot();\n } catch (Exception e) {\n- if (snapshotIndexCommit != null) snapshotIndexCommit.release();\n+ if (snapshotIndexCommit != null) {\n+ snapshotIndexCommit.release();\n+ }\n throw new SnapshotFailedEngineException(shardId, e);\n } finally {\n rwl.readLock().unlock();", "filename": 
"src/main/java/org/elasticsearch/index/engine/robin/RobinEngine.java", "status": "modified" }, { "diff": "@@ -86,6 +86,9 @@ public void handleRequest(final RestRequest request, final RestChannel channel)\n }\n }\n updateRequest.retryOnConflict(request.paramAsInt(\"retry_on_conflict\", updateRequest.retryOnConflict()));\n+ updateRequest.version(RestActions.parseVersion(request));\n+ updateRequest.versionType(VersionType.fromString(request.param(\"version_type\"), updateRequest.versionType()));\n+\n \n // see if we have it in the body\n if (request.hasContent()) {", "filename": "src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java", "status": "modified" }, { "diff": "@@ -20,9 +20,10 @@\n \n import org.apache.lucene.search.BooleanQuery;\n import org.apache.lucene.search.Query;\n-import org.elasticsearch.action.count.CountResponse;\n import org.elasticsearch.ElasticSearchException;\n import org.elasticsearch.action.ActionFuture;\n+import org.elasticsearch.action.ActionRequestBuilder;\n+import org.elasticsearch.action.count.CountResponse;\n import org.elasticsearch.action.search.SearchResponse;\n import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse;\n import org.elasticsearch.search.SearchHit;\n@@ -65,14 +66,14 @@ public static void assertThirdHit(SearchResponse searchResponse, Matcher<SearchH\n public static void assertSearchHit(SearchResponse searchResponse, int number, Matcher<SearchHit> matcher) {\n assert number > 0;\n assertThat(\"SearchHit number must be greater than 0\", number, greaterThan(0));\n- assertThat(searchResponse.getHits().totalHits(), greaterThanOrEqualTo((long)number));\n- assertSearchHit(searchResponse.getHits().getAt(number-1), matcher);\n+ assertThat(searchResponse.getHits().totalHits(), greaterThanOrEqualTo((long) number));\n+ assertSearchHit(searchResponse.getHits().getAt(number - 1), matcher);\n }\n- \n+\n public static void assertNoFailures(SearchResponse searchResponse) {\n assertThat(\"Unexpectd ShardFailures: \" + Arrays.toString(searchResponse.getShardFailures()), searchResponse.getShardFailures().length, equalTo(0));\n }\n- \n+\n public static void assertNoFailures(BroadcastOperationResponse response) {\n assertThat(\"Unexpectd ShardFailures: \" + Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0));\n }\n@@ -88,16 +89,16 @@ public static void assertHighlight(SearchResponse resp, int hit, String field, i\n assertThat(resp.getHits().hits()[hit].getHighlightFields().get(field).fragments().length, greaterThan(fragment));\n assertThat(resp.getHits().hits()[hit].highlightFields().get(field).fragments()[fragment].string(), matcher);\n }\n- \n+\n public static void assertSuggestionSize(Suggest searchSuggest, int entry, int size, String key) {\n assertThat(searchSuggest, notNullValue());\n- assertThat(searchSuggest.size(),greaterThanOrEqualTo(1));\n+ assertThat(searchSuggest.size(), greaterThanOrEqualTo(1));\n assertThat(searchSuggest.getSuggestion(key).getName(), equalTo(key));\n assertThat(searchSuggest.getSuggestion(key).getEntries().size(), greaterThanOrEqualTo(entry));\n assertThat(searchSuggest.getSuggestion(key).getEntries().get(entry).getOptions().size(), equalTo(size));\n \n }\n- \n+\n public static void assertSuggestion(Suggest searchSuggest, int entry, int ord, String key, String text) {\n assertThat(searchSuggest, notNullValue());\n assertThat(searchSuggest.size(), greaterThanOrEqualTo(1));\n@@ -121,31 +122,34 @@ public static Matcher<SearchHit> hasType(final String type) {\n public 
static Matcher<SearchHit> hasIndex(final String index) {\n return new ElasticsearchMatchers.SearchHitHasIndexMatcher(index);\n }\n- \n+\n public static <T extends Query> T assertBooleanSubQuery(Query query, Class<T> subqueryType, int i) {\n assertThat(query, instanceOf(BooleanQuery.class));\n BooleanQuery q = (BooleanQuery) query;\n assertThat(q.getClauses().length, greaterThan(i));\n assertThat(q.getClauses()[i].getQuery(), instanceOf(subqueryType));\n- return (T)q.getClauses()[i].getQuery();\n+ return (T) q.getClauses()[i].getQuery();\n+ }\n+\n+ public static <E extends Throwable> void assertThrows(ActionRequestBuilder<?, ?, ?> builder, Class<E> exceptionClass) {\n+ assertThrows(builder.execute(), exceptionClass);\n }\n \n- public static <E extends Throwable> void assertThrows(ActionFuture future, Class<E> exceptionClass) {\n- boolean fail=false;\n+ public static <E extends Throwable> void assertThrows(ActionFuture future, Class<E> exceptionClass) {\n+ boolean fail = false;\n try {\n future.actionGet();\n- fail=true;\n+ fail = true;\n \n- }\n- catch (ElasticSearchException esException) {\n+ } catch (ElasticSearchException esException) {\n assertThat(esException.unwrapCause(), instanceOf(exceptionClass));\n- }\n- catch (Throwable e) {\n+ } catch (Throwable e) {\n assertThat(e, instanceOf(exceptionClass));\n }\n // has to be outside catch clause to get a proper message\n- if (fail)\n+ if (fail) {\n throw new AssertionError(\"Expected a \" + exceptionClass + \" exception to be thrown\");\n+ }\n }\n \n }", "filename": "src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java", "status": "modified" }, { "diff": "@@ -30,6 +30,7 @@\n import org.elasticsearch.action.admin.indices.flush.FlushResponse;\n import org.elasticsearch.action.admin.indices.optimize.OptimizeResponse;\n import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;\n+import org.elasticsearch.action.get.GetResponse;\n import org.elasticsearch.action.index.IndexResponse;\n import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder;\n import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse;\n@@ -46,7 +47,6 @@\n import org.elasticsearch.common.settings.ImmutableSettings;\n import org.elasticsearch.common.settings.Settings;\n import org.elasticsearch.common.xcontent.XContentBuilder;\n-import org.elasticsearch.index.merge.policy.AbstractMergePolicyProvider;\n import org.elasticsearch.indices.IndexAlreadyExistsException;\n import org.elasticsearch.indices.IndexMissingException;\n import org.elasticsearch.indices.IndexTemplateMissingException;\n@@ -58,7 +58,6 @@\n import java.io.IOException;\n import java.util.HashSet;\n import java.util.Iterator;\n-import java.util.Random;\n import java.util.Set;\n \n import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n@@ -319,6 +318,10 @@ protected IndexResponse index(String index, String type, XContentBuilder source)\n return client().prepareIndex(index, type).setSource(source).execute().actionGet();\n }\n \n+ protected GetResponse get(String index, String type, String id) {\n+ return client().prepareGet(index, type, id).execute().actionGet();\n+ }\n+\n protected IndexResponse index(String index, String type, String id, String field, Object value) {\n return client().prepareIndex(index, type, id).setSource(field, value).execute().actionGet();\n }", "filename": "src/test/java/org/elasticsearch/test/integration/AbstractSharedClusterTest.java", "status": "modified" }, { "diff": "@@ -1,21 +1,23 @@\n 
package org.elasticsearch.test.integration.document;\n \n-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n-import static org.hamcrest.MatcherAssert.assertThat;\n-import static org.hamcrest.Matchers.containsString;\n-import static org.hamcrest.Matchers.equalTo;\n-import static org.hamcrest.Matchers.nullValue;\n-\n import org.elasticsearch.action.bulk.BulkRequestBuilder;\n import org.elasticsearch.action.bulk.BulkResponse;\n import org.elasticsearch.action.get.GetResponse;\n+import org.elasticsearch.action.index.IndexResponse;\n import org.elasticsearch.action.update.UpdateRequestBuilder;\n import org.elasticsearch.action.update.UpdateResponse;\n import org.elasticsearch.common.Priority;\n import org.elasticsearch.common.settings.ImmutableSettings;\n+import org.elasticsearch.index.VersionType;\n import org.elasticsearch.test.integration.AbstractSharedClusterTest;\n import org.testng.annotations.Test;\n \n+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n+import static org.hamcrest.MatcherAssert.assertThat;\n+import static org.hamcrest.Matchers.*;\n+import static org.testng.Assert.assertFalse;\n+import static org.testng.Assert.assertTrue;\n+\n /**\n */\n public class BulkTests extends AbstractSharedClusterTest {\n@@ -105,6 +107,53 @@ public void testBulkUpdate_simple() throws Exception {\n assertThat(((Long) getResponse.getField(\"field\").getValue()), equalTo(4l));\n }\n \n+ @Test\n+ public void testBulkVersioning() throws Exception {\n+ createIndex(\"test\");\n+ ensureGreen();\n+ BulkResponse bulkResponse = run(client().prepareBulk()\n+ .add(client().prepareIndex(\"test\", \"type\", \"1\").setCreate(true).setSource(\"field\", \"1\"))\n+ .add(client().prepareIndex(\"test\", \"type\", \"2\").setCreate(true).setSource(\"field\", \"1\"))\n+ .add(client().prepareIndex(\"test\", \"type\", \"1\").setSource(\"field\", \"2\")));\n+\n+ assertTrue(((IndexResponse) bulkResponse.getItems()[0].getResponse()).isCreated());\n+ assertThat(((IndexResponse) bulkResponse.getItems()[0].getResponse()).getVersion(), equalTo(1l));\n+ assertTrue(((IndexResponse) bulkResponse.getItems()[1].getResponse()).isCreated());\n+ assertThat(((IndexResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(1l));\n+ assertFalse(((IndexResponse) bulkResponse.getItems()[2].getResponse()).isCreated());\n+ assertThat(((IndexResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(2l));\n+\n+ bulkResponse = run(client().prepareBulk()\n+ .add(client().prepareUpdate(\"test\", \"type\", \"1\").setVersion(4l).setDoc(\"field\", \"2\"))\n+ .add(client().prepareUpdate(\"test\", \"type\", \"2\").setDoc(\"field\", \"2\"))\n+ .add(client().prepareUpdate(\"test\", \"type\", \"1\").setVersion(2l).setDoc(\"field\", \"3\")));\n+\n+ assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString(\"Version\"));\n+ assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(2l));\n+ assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(3l));\n+\n+ bulkResponse = run(client().prepareBulk()\n+ .add(client().prepareIndex(\"test\", \"type\", \"e1\").setCreate(true).setSource(\"field\", \"1\").setVersion(10).setVersionType(VersionType.EXTERNAL))\n+ .add(client().prepareIndex(\"test\", \"type\", \"e2\").setCreate(true).setSource(\"field\", \"1\").setVersion(10).setVersionType(VersionType.EXTERNAL))\n+ .add(client().prepareIndex(\"test\", \"type\", \"e1\").setSource(\"field\", 
\"2\").setVersion(12).setVersionType(VersionType.EXTERNAL)));\n+\n+ assertTrue(((IndexResponse) bulkResponse.getItems()[0].getResponse()).isCreated());\n+ assertThat(((IndexResponse) bulkResponse.getItems()[0].getResponse()).getVersion(), equalTo(10l));\n+ assertTrue(((IndexResponse) bulkResponse.getItems()[1].getResponse()).isCreated());\n+ assertThat(((IndexResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(10l));\n+ assertFalse(((IndexResponse) bulkResponse.getItems()[2].getResponse()).isCreated());\n+ assertThat(((IndexResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(12l));\n+\n+ bulkResponse = run(client().prepareBulk()\n+ .add(client().prepareUpdate(\"test\", \"type\", \"e1\").setVersion(4l).setDoc(\"field\", \"2\").setVersion(10).setVersionType(VersionType.EXTERNAL))\n+ .add(client().prepareUpdate(\"test\", \"type\", \"e2\").setDoc(\"field\", \"2\").setVersion(15).setVersionType(VersionType.EXTERNAL))\n+ .add(client().prepareUpdate(\"test\", \"type\", \"e1\").setVersion(2l).setDoc(\"field\", \"3\").setVersion(15).setVersionType(VersionType.EXTERNAL)));\n+\n+ assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString(\"Version\"));\n+ assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(15l));\n+ assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(15l));\n+ }\n+\n @Test\n public void testBulkUpdate_malformedScripts() throws Exception {\n client().admin().indices().prepareDelete().execute().actionGet();\n@@ -140,7 +189,7 @@ public void testBulkUpdate_malformedScripts() throws Exception {\n \n assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getId(), equalTo(\"2\"));\n assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(2l));\n- assertThat(((Integer)((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field(\"field\").getValue()), equalTo(2));\n+ assertThat(((Integer) ((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field(\"field\").getValue()), equalTo(2));\n assertThat(bulkResponse.getItems()[1].getFailure(), nullValue());\n \n assertThat(bulkResponse.getItems()[2].getFailure().getId(), equalTo(\"3\"));\n@@ -182,7 +231,7 @@ public void testBulkUpdate_largerVolume() throws Exception {\n assertThat(response.getItems()[i].getOpType(), equalTo(\"update\"));\n assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getId(), equalTo(Integer.toString(i)));\n assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getVersion(), equalTo(1l));\n- assertThat(((Integer)((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field(\"counter\").getValue()), equalTo(1));\n+ assertThat(((Integer) ((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field(\"counter\").getValue()), equalTo(1));\n \n for (int j = 0; j < 5; j++) {\n GetResponse getResponse = client().prepareGet(\"test\", \"type1\", Integer.toString(i)).setFields(\"counter\").execute().actionGet();\n@@ -219,7 +268,7 @@ public void testBulkUpdate_largerVolume() throws Exception {\n assertThat(response.getItems()[i].getOpType(), equalTo(\"update\"));\n assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getId(), equalTo(Integer.toString(i)));\n assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getVersion(), equalTo(2l));\n- assertThat(((Integer)((UpdateResponse) 
response.getItems()[i].getResponse()).getGetResult().field(\"counter\").getValue()), equalTo(2));\n+ assertThat(((Integer) ((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field(\"counter\").getValue()), equalTo(2));\n }\n \n builder = client().prepareBulk();", "filename": "src/test/java/org/elasticsearch/test/integration/document/BulkTests.java", "status": "modified" }, { "diff": "@@ -29,7 +29,9 @@\n import org.elasticsearch.common.Priority;\n import org.elasticsearch.common.xcontent.XContentFactory;\n import org.elasticsearch.common.xcontent.XContentHelper;\n+import org.elasticsearch.index.VersionType;\n import org.elasticsearch.index.engine.DocumentMissingException;\n+import org.elasticsearch.index.engine.VersionConflictEngineException;\n import org.elasticsearch.test.integration.AbstractSharedClusterTest;\n import org.testng.annotations.Test;\n \n@@ -39,6 +41,7 @@\n \n import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n import static org.elasticsearch.index.query.QueryBuilders.termQuery;\n+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;\n import static org.hamcrest.MatcherAssert.assertThat;\n import static org.hamcrest.Matchers.*;\n import static org.testng.AssertJUnit.*;\n@@ -163,22 +166,23 @@ public void testUpsert() throws Exception {\n assertThat(getResponse.getSourceAsMap().get(\"field\").toString(), equalTo(\"2\"));\n }\n }\n+\n @Test\n public void testUpsertDoc() throws Exception {\n- \tcreateIndex();\n+ createIndex();\n ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();\n assertThat(clusterHealth.isTimedOut(), equalTo(false));\n assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN));\n- \n+\n UpdateResponse updateResponse = client().prepareUpdate(\"test\", \"type1\", \"1\")\n- \t\t.setDoc(XContentFactory.jsonBuilder().startObject().field(\"bar\", \"baz\").endObject()) \t\t\n- \t\t.setDocAsUpsert(true)\n+ .setDoc(XContentFactory.jsonBuilder().startObject().field(\"bar\", \"baz\").endObject())\n+ .setDocAsUpsert(true)\n .setFields(\"_source\")\n .execute().actionGet();\n assertThat(updateResponse.getGetResult(), notNullValue());\n assertThat(updateResponse.getGetResult().sourceAsMap().get(\"bar\").toString(), equalTo(\"baz\"));\n }\n- \n+\n @Test\n public void testUpsertFields() throws Exception {\n createIndex();\n@@ -207,6 +211,60 @@ public void testUpsertFields() throws Exception {\n assertThat(updateResponse.getGetResult().sourceAsMap().get(\"extra\").toString(), equalTo(\"foo\"));\n }\n \n+ @Test\n+ public void testVersionedUpdate() throws Exception {\n+ createIndex(\"test\");\n+ ensureGreen();\n+\n+ index(\"test\", \"type\", \"1\", \"text\", \"value\"); // version is now 1\n+\n+ assertThrows(client().prepareUpdate(\"test\", \"type\", \"1\").setScript(\"ctx._source.text = 'v2'\").setVersion(2).execute(),\n+ VersionConflictEngineException.class);\n+\n+ run(client().prepareUpdate(\"test\", \"type\", \"1\").setScript(\"ctx._source.text = 'v2'\").setVersion(1));\n+ assertThat(run(client().prepareGet(\"test\", \"type\", \"1\")).getVersion(), equalTo(2l));\n+\n+ // and again with a higher version..\n+ run(client().prepareUpdate(\"test\", \"type\", \"1\").setScript(\"ctx._source.text = 'v3'\").setVersion(2));\n+\n+ assertThat(run(client().prepareGet(\"test\", \"type\", \"1\")).getVersion(), equalTo(3l));\n+\n+ // after delete\n+ run(client().prepareDelete(\"test\", 
\"type\", \"1\"));\n+ assertThrows(client().prepareUpdate(\"test\", \"type\", \"1\").setScript(\"ctx._source.text = 'v2'\").setVersion(3).execute(),\n+ DocumentMissingException.class);\n+\n+ // external versioning\n+ run(client().prepareIndex(\"test\", \"type\", \"2\").setSource(\"text\", \"value\").setVersion(10).setVersionType(VersionType.EXTERNAL));\n+ assertThrows(client().prepareUpdate(\"test\", \"type\", \"2\").setScript(\"ctx._source.text = 'v2'\").setVersion(2).setVersionType(VersionType.EXTERNAL).execute(),\n+ VersionConflictEngineException.class);\n+\n+ run(client().prepareUpdate(\"test\", \"type\", \"2\").setScript(\"ctx._source.text = 'v2'\").setVersion(11).setVersionType(VersionType.EXTERNAL));\n+\n+ assertThat(run(client().prepareGet(\"test\", \"type\", \"2\")).getVersion(), equalTo(11l));\n+\n+ // upserts - the combination with versions is a bit weird. Test are here to ensure we do not change our behavior unintentionally\n+\n+ // With internal versions, tt means \"if object is there with version X, update it or explode. If it is not there, index.\n+ run(client().prepareUpdate(\"test\", \"type\", \"3\").setScript(\"ctx._source.text = 'v2'\").setVersion(10).setUpsertRequest(\"{ \\\"text\\\": \\\"v0\\\" }\"));\n+ GetResponse get = get(\"test\", \"type\", \"3\");\n+ assertThat(get.getVersion(), equalTo(1l));\n+ assertThat((String) get.getSource().get(\"text\"), equalTo(\"v0\"));\n+\n+ // With external versions, it means - if object is there with version lower than X, update it or explode. If it is not there, insert with new version.\n+ run(client().prepareUpdate(\"test\", \"type\", \"4\").setScript(\"ctx._source.text = 'v2'\").\n+ setVersion(10).setVersionType(VersionType.EXTERNAL).setUpsertRequest(\"{ \\\"text\\\": \\\"v0\\\" }\"));\n+ get = get(\"test\", \"type\", \"4\");\n+ assertThat(get.getVersion(), equalTo(10l));\n+ assertThat((String) get.getSource().get(\"text\"), equalTo(\"v0\"));\n+\n+\n+ // retry on conflict is rejected:\n+\n+ assertThrows(client().prepareUpdate(\"test\", \"type\", \"1\").setVersion(10).setRetryOnConflict(5), ActionRequestValidationException.class);\n+\n+ }\n+\n @Test\n public void testIndexAutoCreation() throws Exception {\n try {\n@@ -390,10 +448,10 @@ public void testUpdateRequestWithBothScriptAndDoc() throws Exception {\n assertThat(e.getMessage(), containsString(\"can't provide both script and doc\"));\n }\n }\n- \n+\n @Test\n- public void testUpdateRequestWithScriptAndShouldUpsertDoc() throws Exception{\n- \tcreateIndex();\n+ public void testUpdateRequestWithScriptAndShouldUpsertDoc() throws Exception {\n+ createIndex();\n ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();\n assertThat(clusterHealth.isTimedOut(), equalTo(false));\n assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN));", "filename": "src/test/java/org/elasticsearch/test/integration/update/UpdateTests.java", "status": "modified" }, { "diff": "@@ -0,0 +1,115 @@\n+package org.elasticsearch.test.unit.index;\n+/*\n+ * Licensed to ElasticSearch under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. 
You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+\n+\n+import org.elasticsearch.common.lucene.uid.Versions;\n+import org.elasticsearch.index.VersionType;\n+import org.testng.annotations.Test;\n+\n+import static org.hamcrest.MatcherAssert.assertThat;\n+import static org.hamcrest.Matchers.equalTo;\n+import static org.testng.Assert.assertFalse;\n+import static org.testng.Assert.assertTrue;\n+\n+public class VersionTypeTests {\n+ @Test\n+ public void testInternalVersionConflict() throws Exception {\n+\n+ assertFalse(VersionType.INTERNAL.isVersionConflict(10, Versions.MATCH_ANY));\n+ // if we don't have a version in the index we accept everything\n+ assertFalse(VersionType.INTERNAL.isVersionConflict(Versions.NOT_SET, 10));\n+ assertFalse(VersionType.INTERNAL.isVersionConflict(Versions.NOT_SET, Versions.MATCH_ANY));\n+\n+ // if we didn't find a version (but the index does support it), we don't like it unless MATCH_ANY\n+ assertTrue(VersionType.INTERNAL.isVersionConflict(Versions.NOT_FOUND, Versions.NOT_FOUND));\n+ assertTrue(VersionType.INTERNAL.isVersionConflict(Versions.NOT_FOUND, 10));\n+ assertFalse(VersionType.INTERNAL.isVersionConflict(Versions.NOT_FOUND, Versions.MATCH_ANY));\n+\n+ // and the stupid usual case\n+ assertFalse(VersionType.INTERNAL.isVersionConflict(10, 10));\n+ assertTrue(VersionType.INTERNAL.isVersionConflict(9, 10));\n+ assertTrue(VersionType.INTERNAL.isVersionConflict(10, 9));\n+\n+// Old indexing code, dictating behavior\n+// if (expectedVersion != Versions.MATCH_ANY && currentVersion != Versions.NOT_SET) {\n+// // an explicit version is provided, see if there is a conflict\n+// // if we did not find anything, and a version is provided, so we do expect to find a doc under that version\n+// // this is important, since we don't allow to preset a version in order to handle deletes\n+// if (currentVersion == Versions.NOT_FOUND) {\n+// throw new VersionConflictEngineException(shardId, index.type(), index.id(), Versions.NOT_FOUND, expectedVersion);\n+// } else if (expectedVersion != currentVersion) {\n+// throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);\n+// }\n+// }\n+// updatedVersion = (currentVersion == Versions.NOT_SET || currentVersion == Versions.NOT_FOUND) ? 1 : currentVersion + 1;\n+ }\n+\n+ @Test\n+ public void testExternalVersionConflict() throws Exception {\n+\n+ assertFalse(VersionType.EXTERNAL.isVersionConflict(Versions.NOT_FOUND, 10));\n+ assertFalse(VersionType.EXTERNAL.isVersionConflict(Versions.NOT_SET, 10));\n+ // MATCH_ANY must throw an exception in the case of external version, as the version must be set! 
it used as the new value\n+ assertTrue(VersionType.EXTERNAL.isVersionConflict(10, Versions.MATCH_ANY));\n+\n+ // if we didn't find a version (but the index does support it), we always accept\n+ assertFalse(VersionType.EXTERNAL.isVersionConflict(Versions.NOT_FOUND, Versions.NOT_FOUND));\n+ assertFalse(VersionType.EXTERNAL.isVersionConflict(Versions.NOT_FOUND, 10));\n+ assertFalse(VersionType.EXTERNAL.isVersionConflict(Versions.NOT_FOUND, Versions.MATCH_ANY));\n+\n+ // and the standard behavior\n+ assertTrue(VersionType.EXTERNAL.isVersionConflict(10, 10));\n+ assertFalse(VersionType.EXTERNAL.isVersionConflict(9, 10));\n+ assertTrue(VersionType.EXTERNAL.isVersionConflict(10, 9));\n+\n+\n+// Old indexing code, dictating behavior\n+// // an external version is provided, just check, if a local version exists, that its higher than it\n+// // the actual version checking is one in an external system, and we just want to not index older versions\n+// if (currentVersion >= 0) { // we can check!, its there\n+// if (currentVersion >= index.version()) {\n+// throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, index.version());\n+// }\n+// }\n+// updatedVersion = index.version();\n+ }\n+\n+\n+ @Test\n+ public void testUpdateVersion() {\n+\n+ assertThat(VersionType.INTERNAL.updateVersion(Versions.NOT_SET, 10), equalTo(1l));\n+ assertThat(VersionType.INTERNAL.updateVersion(Versions.NOT_FOUND, 10), equalTo(1l));\n+ assertThat(VersionType.INTERNAL.updateVersion(1, 1), equalTo(2l));\n+ assertThat(VersionType.INTERNAL.updateVersion(2, Versions.MATCH_ANY), equalTo(3l));\n+\n+\n+ assertThat(VersionType.EXTERNAL.updateVersion(Versions.NOT_SET, 10), equalTo(10l));\n+ assertThat(VersionType.EXTERNAL.updateVersion(Versions.NOT_FOUND, 10), equalTo(10l));\n+ assertThat(VersionType.EXTERNAL.updateVersion(1, 10), equalTo(10l));\n+\n+// Old indexing code\n+// if (index.versionType() == VersionType.INTERNAL) { // internal version type\n+// updatedVersion = (currentVersion == Versions.NOT_SET || currentVersion == Versions.NOT_FOUND) ? 1 : currentVersion + 1;\n+// } else { // external version type\n+// updatedVersion = expectedVersion;\n+// }\n+ }\n+}", "filename": "src/test/java/org/elasticsearch/test/unit/index/VersionTypeTests.java", "status": "added" } ] }
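The refactor above collapses the per-operation version branching in RobinEngine into two questions answered by `VersionType`: is this write a conflict, and what version does the document get next. The sketch below restates those semantics as a standalone enum so they can be read in one place; it is only a sketch derived from the expectations in `VersionTypeTests`, and the sentinel constants are illustrative placeholders, not the real `Versions` class.

```java
// Standalone sketch of the semantics centralized by VersionType, as pinned down
// by VersionTypeTests above. Sentinel values are illustrative placeholders for
// org.elasticsearch.common.lucene.uid.Versions, not the actual constants.
enum VersionTypeSketch {
    INTERNAL {
        @Override boolean isVersionConflict(long current, long expected) {
            if (expected == MATCH_ANY) return false;   // caller did not ask for a check
            if (current == NOT_SET) return false;      // index carries no version info
            if (current == NOT_FOUND) return true;     // a version was expected, but no doc exists
            return current != expected;                // internal versions must match exactly
        }
        @Override long updateVersion(long current, long expected) {
            return (current == NOT_SET || current == NOT_FOUND) ? 1 : current + 1;
        }
    },
    EXTERNAL {
        @Override boolean isVersionConflict(long current, long expected) {
            if (current == NOT_SET || current == NOT_FOUND) return false; // nothing local yet, accept
            if (expected == MATCH_ANY) return true;    // an external write must carry its version
            return current >= expected;                // only strictly newer external versions win
        }
        @Override long updateVersion(long current, long expected) {
            return expected;                           // the external system owns the version
        }
    };

    static final long MATCH_ANY = 0, NOT_SET = -2, NOT_FOUND = -1; // placeholders only

    abstract boolean isVersionConflict(long currentVersion, long expectedVersion);
    abstract long updateVersion(long currentVersion, long expectedVersion);
}
```

On replicas, both index and delete now reuse the EXTERNAL rules, since the primary has already decided the version; the only replica-specific behaviour left is silently ignoring stale versions during recovery.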
{ "body": "Mapping of searched field:\n\n```\nlocation: {\n type: geo_point\n}\n```\n\nI tried also with:\n\n```\nlocation: {\n lat_lon: true,\n type: geo_point,\n geohash: true,\n geohash_precision: 24\n}\n```\n\nQuery:\n\n```\n{\n \"from\": 0,\n \"fields\": [\n \"_id\",\n \"_parent\",\n \"_routing\",\n \"_source\"\n ],\n \"filter\": {\n \"geo_distance\": {\n \"distance\": \"50mi\",\n \"optimize_bbox\": \"memory\",\n \"location\": {\n \"lat\": 40.720611,\n \"lon\": -73.998776\n }\n }\n },\n \"query\": {\n \"match_all\": {}\n },\n \"size\": 20\n}\n```\n\nData:\n\n```\n{\n \"location\": \"dr5rshgwz81eqnfrrrhz\"\n}\n```\n\nIt works for 0.20.2 and don't work for current release 0.90.0\n\nEDIT: Sorry I post it to fast, data attached :)\n", "comments": [ { "body": "Hi @kosz85, can you also post a simple example of your indexed data, filters and results?\n", "created_at": "2013-05-22T13:06:25Z" }, { "body": "I may add that 0.90.0 has no matches even for 5000mi\n\n```\n{\n \"filter\": {\n \"geo_distance\": {\n \"distance\": \"50mi\",\n \"optimize_bbox\": \"memory\",\n \"location\": {\n \"lat\": 40.720611,\n \"lon\": -73.998776\n }\n }\n },\n \"query\": {\n \"match_all\": {}\n },\n \"size\": 20\n}\n```\n\nAlmost the same data in both instances\n\n0.90.0\n\n```\n{\n \"took\": 2,\n \"timed_out\": false,\n \"_shards\": {\n \"total\": 3,\n \"successful\": 3,\n \"failed\": 0\n },\n \"hits\": {\n \"total\": 0,\n \"max_score\": null,\n \"hits\": []\n }\n}\n```\n\n0.20.2\n\n```\n{\n \"took\": 3,\n \"timed_out\": false,\n \"_shards\": {\n \"total\": 5,\n \"successful\": 5,\n \"failed\": 0\n },\n \"hits\": {\n \"total\": 220,\n \"max_score\": 1,\n \"hits\": [\n\n(...)\n {\n \"_id\": \"105\",\n \"_score\": 1,\n \"_source\": {\n \"_id\": 105,\n \"location\": \"dr72hbxdbzk3npz479cy\"\n }\n(...)\n```\n\nExplain on 0.90.0:\n\n```\n{\n \"query\": {\n \"filtered\": {\n \"filter\": {\n \"geo_distance\": {\n \"distance\": \"50mi\",\n \"location\": {\n \"lat\": 40.720611,\n \"lon\": -73.998776\n }\n }\n }\n }\n}\n-------\n{\n \"ok\": true,\n \"_index\": \"business_index\",\n \"_type\": \"business\",\n \"_id\": \"105\",\n \"matched\": false,\n \"explanation\": {\n \"value\": 0,\n \"description\": \"ConstantScore(GeoDistanceFilter(location, ARC, 50.0, 40.720611, -73.998776)) doesn't match id 7\"\n }\n}\n```\n", "created_at": "2013-05-22T13:15:39Z" }, { "body": "@kosz85 thanks for open this issue. I think my commit will fix this\n", "created_at": "2013-06-07T16:14:20Z" }, { "body": "Thanks, fix worked after reindexing data. (We checked it on version 0.90.1 patched with this commit)\n", "created_at": "2013-06-10T07:22:09Z" } ], "number": 3073, "title": "Regresion: geo distance filter - filters out proper geohashes" }
{ "body": "Fixed the `GeoPointFieldMapper` to parse `geohashes` correctly.\n\nCloses #3073\n", "number": 3151, "review_comments": [], "title": "GeoHashes in GeoPointFieldMapper" }
{ "commits": [ { "message": "Fixed the `GeoPointFieldMapper` to parse `geohashes` correctly.\n\nCloses #3073" } ], "files": [ { "diff": "@@ -440,7 +440,7 @@ private void parseGeohash(ParseContext context, String geohash) throws IOExcepti\n }\n }\n \n- context.externalValue(Double.toString(point.lat()) + ',' + Double.toString(point.lat()));\n+ context.externalValue(Double.toString(point.lat()) + ',' + Double.toString(point.lon()));\n geoStringMapper.parse(context);\n if (enableGeoHash) {\n context.externalValue(geohash);", "filename": "src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java", "status": "modified" }, { "diff": "@@ -19,22 +19,29 @@\n \n package org.elasticsearch.test.integration.search.geo;\n \n+import java.io.IOException;\n+\n import org.elasticsearch.action.search.SearchPhaseExecutionException;\n import org.elasticsearch.action.search.SearchResponse;\n import org.elasticsearch.common.Priority;\n import org.elasticsearch.common.geo.GeoDistance;\n+import org.elasticsearch.common.geo.GeoHashUtils;\n import org.elasticsearch.common.unit.DistanceUnit;\n+import org.elasticsearch.common.xcontent.XContentBuilder;\n import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.common.xcontent.json.JsonXContent;\n+import org.elasticsearch.index.query.FilterBuilders;\n+import org.elasticsearch.index.query.QueryBuilders;\n import org.elasticsearch.rest.RestStatus;\n import org.elasticsearch.search.SearchHit;\n import org.elasticsearch.search.sort.SortBuilders;\n import org.elasticsearch.search.sort.SortOrder;\n import org.elasticsearch.test.integration.AbstractSharedClusterTest;\n-import org.testng.annotations.BeforeTest;\n import org.testng.annotations.Test;\n \n import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;\n import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;\n import static org.elasticsearch.index.query.FilterBuilders.*;\n import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;\n import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;\n@@ -688,5 +695,50 @@ public void testDistanceSortingNestedFields() throws Exception {\n assertThat(e.shardFailures()[0].status(), equalTo(RestStatus.BAD_REQUEST));\n }\n }\n+ \n+ /**\n+ * Issue 3073\n+ */\n+ @Test\n+ public void testGeoDistanceFilter() throws IOException {\n+ double lat = 40.720611;\n+ double lon = -73.998776;\n+\n+ XContentBuilder mapping = JsonXContent.contentBuilder()\n+ .startObject()\n+ .startObject(\"location\")\n+ .startObject(\"properties\")\n+ .startObject(\"pin\")\n+ .field(\"type\", \"geo_point\")\n+ .field(\"geohash\", true)\n+ .field(\"geohash_precision\", 24)\n+ .field(\"lat_lon\", true)\n+ .endObject()\n+ .endObject()\n+ .endObject()\n+ .endObject();\n+\n+ XContentBuilder source = JsonXContent.contentBuilder()\n+ .startObject()\n+ .field(\"pin\", GeoHashUtils.encode(lat, lon))\n+ .endObject();\n+ \n+ ensureYellow();\n+ \n+ client().admin().indices().prepareCreate(\"locations\").addMapping(\"location\", mapping).execute().actionGet();\n+ client().prepareIndex(\"locations\", \"location\", \"1\").setCreate(true).setSource(source).execute().actionGet();\n+ client().admin().indices().prepareRefresh(\"locations\").execute().actionGet();\n+ client().prepareGet(\"locations\", \"location\", \"1\").execute().actionGet();\n+\n+ SearchResponse result = client().prepareSearch(\"locations\")\n+ 
.setQuery(QueryBuilders.matchAllQuery())\n+ .setFilter(FilterBuilders.geoDistanceFilter(\"pin\")\n+ .geoDistance(GeoDistance.ARC)\n+ .lat(lat).lon(lon)\n+ .distance(\"1m\"))\n+ .execute().actionGet();\n+\n+ assertHitCount(result, 1);\n+ } \n \n }\n\\ No newline at end of file", "filename": "src/test/java/org/elasticsearch/test/integration/search/geo/GeoDistanceTests.java", "status": "modified" } ] }
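The production change above is a single character: `parseGeohash` built the externalized value as `lat + "," + lat`, so every geohash-mapped point was stored with its latitude duplicated into the longitude slot. The back-of-the-envelope check below shows why no 50mi filter around the issue's point (40.720611, -73.998776) could ever match; `arcDistanceKm` and the spherical-Earth haversine are my own illustrative helper, not the actual `GeoDistance.ARC` implementation.

```java
// Why the duplicated latitude broke matching: the point decoded from the geohash
// should be (40.720611, -73.998776), but the bug stored it as (40.720611, 40.720611).
public class LatLonSwapCheck {
    static double arcDistanceKm(double lat1, double lon1, double lat2, double lon2) {
        double r = 6371.0; // mean Earth radius, km
        double p1 = Math.toRadians(lat1), p2 = Math.toRadians(lat2);
        double dp = Math.toRadians(lat2 - lat1), dl = Math.toRadians(lon2 - lon1);
        double a = Math.sin(dp / 2) * Math.sin(dp / 2)
                 + Math.cos(p1) * Math.cos(p2) * Math.sin(dl / 2) * Math.sin(dl / 2);
        return 2 * r * Math.asin(Math.sqrt(a));
    }

    public static void main(String[] args) {
        double lat = 40.720611, lon = -73.998776;
        // Distance between the intended point and the mis-stored "lat,lat" point:
        // thousands of kilometres, far beyond a 50mi radius.
        System.out.println(arcDistanceKm(lat, lon, lat, lat) + " km");
    }
}
```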
{ "body": "When applying highlight on fields that are declared as stored at index mapping but do not actually exists the following exception occurs (in version 0.90.x)\n\n``` javascript\n{\n \"took\":6,\n \"timed_out\":false,\n \"_shards\":{\n \"total\":3,\n \"successful\":2,\n \"failed\":1,\n \"failures\":[\n {\n \"index\":\"myindex\",\n \"shard\":1,\n \"status\":500,\n \"reason\":\"FetchPhaseExecutionException[[myindex][1]: query[filtered(name:p*)->cache(_type:myindextype)],from[0],size[10]: Fetch Failed [Failed to highlight field [surname]]]; nested: NullPointerException; \"\n }\n ]\n },\n \"hits\":{\n \"total\":1,\n \"max_score\":1.0,\n \"hits\":[\n\n ]\n }\n}.\n```\n\nYou can reproduce the error by doing the following on an empty cluster\n\n``` bash\n # create index \n\ncurl -XPUT 'http://192.168.56.150:9200/myindex/' -d '{\n \"settings\" : {\n \"number_of_shards\" : 3,\n \"number_of_replicas\" : 0\n }\n}'\n\n# create the mapping\n\ncurl -XPUT http://192.168.56.150:9200/myindex/myindextype/_mapping -d '\n { \"myindextype\" : {\"properties\" : { \"name\":{\"type\":\"string\", \"store\":\"yes\", \"analyzer\":\"simple\"}, \"surname\":{\"type\":\"string\", \"store\":\"yes\", \"analyzer\":\"simple\"} } } }\n'\n\n# add a record \ncurl -XPUT http://192.168.56.150:9200/myindex/myindextype/1 -d '{ \"name\":\"panagiotis\" }'\n\n# query with highlighting\n\ncurl -XGET 'http://192.168.56.150:9200/myindex/myindextype/_search' -d '\n{\n \"query\": {\n \"query_string\": {\n \"query\": \"name:p*\"\n }\n },\n \"highlight\": {\n \"order\": \"score\",\n \"fields\": {\n \"name\": {},\n \"surname\": {}\n }\n }\n}\n'\n```\n\nI think that ES should not throw an exception on that case (as did in 0.20.X and earlier versions) due to the schema-free philosophy that is built on. \n\nThe problematic code is **HighlightPhase.java:191** and one easy patch is to to replace that line with the following lines\n\n``` java\nif (fieldVisitor.fields() == null)\n textsToHighlight = new ArrayList<Object>();\nelse\n textsToHighlight = fieldVisitor.fields().get(mapper.names().indexName());\n```\n\nI think that the same logic where in previous versions.\n\nThank you,\nAlex\n", "comments": [ { "body": "Thanks for the comprehensive report! Indeed, this kind of query shouldn't fail, I'll look into it.\n", "created_at": "2013-05-29T13:06:23Z" }, { "body": "thanks adrien!\n", "created_at": "2013-05-30T12:00:06Z" } ], "number": 3109, "title": "Highlighter exception (0.90.0)" }
{ "body": "PlainHighlighter fails at highlighting with a hard exception in case the field\nto highlight is missing. This patch fixes this issue by\n- making FieldsVisitor.fields() return an empty list instead of null when no\n stored field was found,\n- replacing the fields to highlight with an empty list in case they are absent.\n\nCloses: #3109\n", "number": 3113, "review_comments": [], "title": "Don't fail highlighting if the field to highlight is missing." }
{ "commits": [ { "message": "Highlighting shouldn't fail when the field to highlight is absent.\n\nPlainHighlighter fails with a NPE when the field to highlight is marked as\nstored in the mapping but doesn't exist in a hit. This patch makes\nFieldsVisitor.fields less error-prone by returning an empty list instead\nof null when no matching stored field was found.\n\nCloses: #3109" } ], "files": [ { "diff": "@@ -19,6 +19,7 @@\n \n package org.elasticsearch.index.fieldvisitor;\n \n+import com.google.common.collect.ImmutableMap;\n import org.apache.lucene.index.FieldInfo;\n import org.apache.lucene.index.StoredFieldVisitor;\n import org.apache.lucene.util.BytesRef;\n@@ -44,9 +45,6 @@ public abstract class FieldsVisitor extends StoredFieldVisitor {\n protected Map<String, List<Object>> fieldsValues;\n \n public void postProcess(MapperService mapperService) {\n- if (fieldsValues == null || fieldsValues.isEmpty()) {\n- return;\n- }\n if (uid != null) {\n DocumentMapper documentMapper = mapperService.documentMapper(uid.type());\n if (documentMapper != null) {\n@@ -56,7 +54,7 @@ public void postProcess(MapperService mapperService) {\n }\n }\n // can't derive exact mapping type\n- for (Map.Entry<String, List<Object>> entry : fieldsValues.entrySet()) {\n+ for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {\n FieldMappers fieldMappers = mapperService.indexName(entry.getKey());\n if (fieldMappers == null) {\n continue;\n@@ -69,8 +67,8 @@ public void postProcess(MapperService mapperService) {\n }\n \n public void postProcess(DocumentMapper documentMapper) {\n- for (Map.Entry<String, List<Object>> entry : fieldsValues.entrySet()) {\n- FieldMapper fieldMapper = documentMapper.mappers().indexName(entry.getKey()).mapper();\n+ for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {\n+ FieldMapper<?> fieldMapper = documentMapper.mappers().indexName(entry.getKey()).mapper();\n if (fieldMapper == null) {\n continue;\n }\n@@ -128,7 +126,9 @@ public Uid uid() {\n }\n \n public Map<String, List<Object>> fields() {\n- return fieldsValues;\n+ return fieldsValues != null\n+ ? 
fieldsValues\n+ : ImmutableMap.<String, List<Object>>of();\n }\n \n public void reset() {", "filename": "src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java", "status": "modified" }, { "diff": "@@ -52,7 +52,6 @@\n import org.elasticsearch.search.lookup.SourceLookup;\n \n import java.io.IOException;\n-import java.util.ArrayList;\n import java.util.HashMap;\n import java.util.List;\n import java.util.Map;\n@@ -322,7 +321,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[]\n }\n source = fieldVisitor.source();\n \n- if (fieldVisitor.fields() != null) {\n+ if (!fieldVisitor.fields().isEmpty()) {\n fieldVisitor.postProcess(docMapper);\n fields = new HashMap<String, GetField>(fieldVisitor.fields().size());\n for (Map.Entry<String, List<Object>> entry : fieldVisitor.fields().entrySet()) {", "filename": "src/main/java/org/elasticsearch/index/get/ShardGetService.java", "status": "modified" }, { "diff": "@@ -145,7 +145,7 @@ public void execute(SearchContext context) {\n fieldsVisitor.postProcess(context.mapperService());\n \n Map<String, SearchHitField> searchFields = null;\n- if (fieldsVisitor.fields() != null) {\n+ if (!fieldsVisitor.fields().isEmpty()) {\n searchFields = new HashMap<String, SearchHitField>(fieldsVisitor.fields().size());\n for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) {\n searchFields.put(entry.getKey(), new InternalSearchHitField(entry.getKey(), entry.getValue()));", "filename": "src/main/java/org/elasticsearch/search/fetch/FetchPhase.java", "status": "modified" }, { "diff": "@@ -18,6 +18,8 @@\n */\n package org.elasticsearch.search.highlight;\n \n+import com.google.common.collect.ImmutableList;\n+\n import com.google.common.collect.ImmutableSet;\n import com.google.common.collect.Maps;\n import org.apache.lucene.analysis.Analyzer;\n@@ -58,10 +60,10 @@ public HighlightField highlight(HighlighterContext highlighterContext) {\n Encoder encoder = field.encoder().equals(\"html\") ? 
Encoders.HTML : Encoders.DEFAULT;\n \n if (!hitContext.cache().containsKey(CACHE_KEY)) {\n- Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter> mappers = Maps.newHashMap();\n+ Map<FieldMapper<?>, org.apache.lucene.search.highlight.Highlighter> mappers = Maps.newHashMap();\n hitContext.cache().put(CACHE_KEY, mappers);\n }\n- Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter> cache = (Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter>) hitContext.cache().get(CACHE_KEY);\n+ Map<FieldMapper<?>, org.apache.lucene.search.highlight.Highlighter> cache = (Map<FieldMapper<?>, org.apache.lucene.search.highlight.Highlighter>) hitContext.cache().get(CACHE_KEY);\n \n org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper);\n if (entry == null) {\n@@ -98,6 +100,10 @@ public HighlightField highlight(HighlighterContext highlighterContext) {\n CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(ImmutableSet.of(mapper.names().indexName()), false);\n hitContext.reader().document(hitContext.docId(), fieldVisitor);\n textsToHighlight = fieldVisitor.fields().get(mapper.names().indexName());\n+ if (textsToHighlight == null) {\n+ // Can happen if the document doesn't have the field to highlight\n+ textsToHighlight = ImmutableList.of();\n+ }\n } catch (Exception e) {\n throw new FetchPhaseExecutionException(context, \"Failed to highlight field [\" + highlighterContext.fieldName + \"]\", e);\n }\n@@ -107,6 +113,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) {\n lookup.setNextDocId(hitContext.docId());\n textsToHighlight = lookup.source().extractRawValues(mapper.names().sourcePath());\n }\n+ assert textsToHighlight != null;\n \n // a HACK to make highlighter do highlighting, even though its using the single frag list builder\n int numberOfFragments = field.numberOfFragments() == 0 ? 
1 : field.numberOfFragments();", "filename": "src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java", "status": "modified" }, { "diff": "@@ -1503,4 +1503,34 @@ public void testPlainHighlightDifferentFragmenter() throws Exception {\n }\n }\n \n+ @Test\n+ public void testMissingStoredField() throws Exception {\n+ try {\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+\n+ client.admin().indices().prepareCreate(\"test\").setSettings(ImmutableSettings.settingsBuilder()\n+ .put(\"index.number_of_shards\", 1).put(\"index.number_of_replicas\", 0))\n+ .addMapping(\"type1\", jsonBuilder().startObject().startObject(\"type1\").startObject(\"properties\")\n+ .startObject(\"highlight_field\").field(\"type\", \"string\").field(\"store\", \"yes\").endObject()\n+ .endObject().endObject().endObject())\n+ .execute().actionGet();\n+\n+ client.prepareIndex(\"test\", \"type1\", \"1\")\n+ .setSource(jsonBuilder().startObject()\n+ .field(\"field\", \"highlight\")\n+ .endObject())\n+ .setRefresh(true).execute().actionGet();\n+\n+ // This query used to fail when the field to highlight was absent\n+ SearchResponse response = client.prepareSearch(\"test\")\n+ .setQuery(QueryBuilders.matchQuery(\"field\", \"highlight\").type(MatchQueryBuilder.Type.BOOLEAN))\n+ .addHighlightedField(new HighlightBuilder.Field(\"highlight_field\")\n+ .fragmentSize(-1).numOfFragments(1).fragmenter(\"simple\"))\n+ .execute().actionGet();\n+ assertThat(response.getHits().hits()[0].highlightFields().isEmpty(), equalTo(true));\n+ }\n+\n }", "filename": "src/test/java/org/elasticsearch/test/integration/search/highlight/HighlighterSearchTests.java", "status": "modified" } ] }
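The crash in PlainHighlighter came from `fieldVisitor.fields().get(...)` returning `null` when the hit simply had no stored value for the requested field. The patch normalizes that in two places: `FieldsVisitor.fields()` now returns an empty map, and a missing field becomes an empty list of texts to highlight. The snippet below only illustrates that defensive idiom with made-up names; it is not the Elasticsearch classes themselves.

```java
import java.util.Collections;
import java.util.List;
import java.util.Map;

// Illustration of the "empty instead of null" idiom the patch applies: a field
// that was never stored on this hit should mean "nothing to highlight", not an NPE.
public class EmptyInsteadOfNull {
    static List<Object> valuesToHighlight(Map<String, List<Object>> storedFields, String field) {
        List<Object> values = storedFields.get(field);
        if (values == null) {
            return Collections.emptyList(); // same spirit as ImmutableList.of() / ImmutableMap.of() in the patch
        }
        return values;
    }
}
```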
{ "body": "if given some document date field that is a basic date, such as\n{\n\"doc_field\" : \"20130425\"\n}\nand _timestamp uses it as its path, it defaults to parsing the integer as ms rather than as the format\n_timestamp{\n\"format\":\"yyyyMMdd\",\n\"enabled\":true\n}\n", "comments": [ { "body": "Closing in favour of #10971\n", "created_at": "2015-06-23T18:21:16Z" } ], "number": 2937, "title": "Timestamp defaults to parsing int even if given a format" }
{ "body": "The current implementation tries to parse every timestamp as a long first\nand uses it as a unix timestamp.\nThis fails if the configured timestamp is like 'YYYYMMDD' and also\nresembles a long by coincidence.\n\nThis PR tries to fix this issue by trying to parse it as a date first and\nonly it fails is tried to be parsed as a long from unix timestamp.\n\nPossible problems:\n- Performance: Reversing the order might make things slower.\n- Wrong parsing: An additional check was added to make sure no negative\n unix timestamps can be generated.\n\nCloses #2937\n", "number": 3072, "review_comments": [], "title": "Inverting logic of parsing timestamp fields" }
{ "commits": [ { "message": "Stop treating every timestamp as an integer first\n\nThe current implementation tries to parse every timestamp as a long first\nand uses it as a unix timestamp.\nThis fails if the configured timestamp is like 'YYYYMMDD' and also\nresembles a long by coincidence.\n\nThis PR tries to fix this issue by trying to parse it as a date first and\nonly it fails is tried to be parsed as a long from unix timestamp.\n\nPossible problems:\n* Performance: Reversing the order might make things slower.\n* Wrong parsing: An additional check was added to make sure no negative\n unix timestamps can be generated.\n\nCloses #2937" } ], "files": [ { "diff": "@@ -161,19 +161,26 @@ public static class Timestamp {\n public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException {\n long ts;\n try {\n- // if we manage to parse it, its a millisecond timestamp, just return the string as is\n- ts = Long.parseLong(timestampAsString);\n- return timestampAsString;\n- } catch (NumberFormatException e) {\n- try {\n- ts = dateTimeFormatter.parser().parseMillis(timestampAsString);\n- } catch (RuntimeException e1) {\n- throw new TimestampParsingException(timestampAsString);\n+ ts = dateTimeFormatter.parser().parseMillis(timestampAsString);\n+ // ugly workaround in order to not create negative timestamps, but rather treat it as unix timestamp\n+ if (ts < 0) {\n+ ts = parseLong(timestampAsString);\n }\n+ } catch (RuntimeException e) {\n+ ts = parseLong(timestampAsString);\n }\n return Long.toString(ts);\n }\n \n+ private static Long parseLong(String timestampAsString) {\n+ try {\n+ // if we manage to parse it, its a millisecond timestamp, just return the string as is\n+ return Long.parseLong(timestampAsString);\n+ } catch (NumberFormatException e1) {\n+ throw new TimestampParsingException(timestampAsString);\n+ }\n+ }\n+\n \n public static final Timestamp EMPTY = new Timestamp(false, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT);\n ", "filename": "src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java", "status": "modified" }, { "diff": "@@ -20,14 +20,17 @@\n package org.elasticsearch.test.integration.timestamp;\n \n import org.elasticsearch.action.get.GetResponse;\n+import org.elasticsearch.action.search.SearchResponse;\n import org.elasticsearch.client.Client;\n import org.elasticsearch.common.Priority;\n import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.index.query.QueryBuilders;\n import org.elasticsearch.test.integration.AbstractNodesTests;\n import org.testng.annotations.AfterClass;\n import org.testng.annotations.BeforeClass;\n import org.testng.annotations.Test;\n \n+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;\n import static org.hamcrest.MatcherAssert.assertThat;\n import static org.hamcrest.Matchers.*;\n \n@@ -106,4 +109,29 @@ public void testSimpleTimestamp() throws Exception {\n getResponse = client.prepareGet(\"test\", \"type1\", \"1\").setFields(\"_timestamp\").setRealtime(false).execute().actionGet();\n assertThat(((Number) getResponse.getField(\"_timestamp\").getValue()).longValue(), equalTo(timestamp));\n }\n+\n+ @Test\n+ public void testThatDateFormatsAreNotMistakenlyParsedAsLong() throws Exception {\n+ client.admin().indices().prepareDelete().execute().actionGet();\n+\n+ client.admin().indices().prepareCreate(\"test\")\n+ .addMapping(\"type1\", XContentFactory.jsonBuilder()\n+ .startObject()\n+ 
.startObject(\"type1\")\n+ .startObject(\"_timestamp\")\n+ .field(\"enabled\", true)\n+ .field(\"format\", \"YYYYMMdd\")\n+ .field(\"path\", \"myDate\")\n+ .endObject()\n+ .endObject()\n+ .endObject())\n+ .execute().actionGet();\n+ client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();\n+\n+ client.prepareIndex(\"test\", \"type1\", \"1\").setSource(\"myDate\", \"20130416\").setRefresh(true).execute().actionGet();\n+\n+ // check for _timestamp field with query, if the above is parsed as long, then query will not match\n+ SearchResponse searchResponse = client.prepareSearch(\"test\").setTypes(\"type1\").setQuery(QueryBuilders.rangeQuery(\"_timestamp\").from(\"20130415\").to(\"20130417\")).execute().actionGet();\n+ assertHitCount(searchResponse, 1);\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/integration/timestamp/SimpleTimestampTests.java", "status": "modified" } ] }
{ "body": "Using elasticsearch version 0.90 - windows binaries.\n\nWhen I try to percolate an item for an index/type that has a default _ttl defined, I get the following error: \n\n```\n{\n \"error\":\n \"MapperParsingException[failed to parse [_ttl]]; nested: AlreadyExpiredException[already expired [test]/[type1]/[null] due to expire at [5184000000] and was processed at [1367509482308]]; \",\n \"status\":400\n}\n```\n\nHere are the steps to recreate the error:\n\n```\ncurl -XPUT localhost:9200/test\n\ncurl -XPUT localhost:9200/test/type1/_mapping -d '{ \n \"type1\": { \n \"_ttl\": { \n \"enabled\": true,\n \"default\": \"60d\"\n },\n \"_timestamp\": { \"enabled\": true } \n }\n }'\n\ncurl -XPUT localhost:9200/_percolator/test/kuku -d '{ \n \"query\" : { \n \"term\" : { \n \"field1\" : \"value1\" \n } \n }\n }'\n\ncurl -XGET localhost:9200/test/type1/_percolate -d '{ \"doc\" : { \"field1\" : \"value1\" }}'\n```\n", "comments": [], "number": 2975, "title": "Percolating an item of a type that has a default _ttl mapping configured throws an error" }
{ "body": "When a type is configured with a TTL, percolation of documents of this type\nwas not possible and threw an exception. This fix ignores the TTL for percolation instead of\nthrowing an exception that the document is already expired.\n\nCloses #2975\n", "number": 3067, "review_comments": [], "title": "Fixing percolation of documents with TTL set" }
{ "commits": [ { "message": "Fixing percolation of documents with TTL set\n\nWhen a type is configured with a TTL, percolation of documents of this type\nwas not possible. This fix ignores the TTL for percolation instead of\nthrowing an exception that the document is already expired.\n\nCloses #2975" } ], "files": [ { "diff": "@@ -190,7 +190,7 @@ public boolean includeInObject() {\n \n @Override\n protected Field innerParseCreateField(ParseContext context) throws IOException, AlreadyExpiredException {\n- if (enabledState.enabled) {\n+ if (enabledState.enabled && !context.sourceToParse().flyweight()) {\n long ttl = context.sourceToParse().ttl();\n if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value\n ttl = defaultTTL;", "filename": "src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java", "status": "modified" }, { "diff": "@@ -486,4 +486,47 @@ public void percolateWithSizeField() throws Exception {\n assertThat(percolate.getMatches(), hasItem(\"kuku\"));\n }\n \n+ @Test\n+ public void testThatPercolatingWithTimeToLiveWorks() throws Exception {\n+ try {\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+ try {\n+ client.admin().indices().prepareDelete(\"_percolator\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+\n+ String mapping = XContentFactory.jsonBuilder().startObject().startObject(\"type1\")\n+ .startObject(\"_ttl\").field(\"enabled\", true).field(\"default\", \"60d\").endObject()\n+ .startObject(\"_timestamp\").field(\"enabled\", true).endObject()\n+ .endObject().endObject().string();\n+\n+ client.admin().indices().prepareCreate(\"test\")\n+ .setSettings(settingsBuilder().put(\"index.number_of_shards\", 2))\n+ .addMapping(\"type1\", mapping)\n+ .execute().actionGet();\n+ client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();\n+\n+ client.prepareIndex(\"_percolator\", \"test\", \"kuku\").setSource(jsonBuilder()\n+ .startObject()\n+ .startObject(\"query\")\n+ .startObject(\"term\")\n+ .field(\"field1\", \"value1\")\n+ .endObject()\n+ .endObject()\n+ .endObject()\n+ ).execute().actionGet();\n+\n+ PercolateResponse percolateResponse = client.preparePercolate(\"test\", \"type1\").setSource(jsonBuilder()\n+ .startObject()\n+ .startObject(\"doc\")\n+ .field(\"field1\", \"value1\")\n+ .endObject()\n+ .endObject()\n+ ).execute().actionGet();\n+ assertThat(percolateResponse.getMatches(), hasItem(\"kuku\"));\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/integration/percolator/SimplePercolatorTests.java", "status": "modified" } ] }
{ "body": "In the following query, we try to boost the FirstLast field by 7 and it fails:\n\n/_search?q=(FirstLast%3A\"johnsmith\")^7&explain=true\n...\n_explanation\": {\n\n```\n\"value\": 10,\n\"description\": \"weight(FirstLast:johnsmith in 0) [PerFieldSimilarity], result of:\"\n```\n\nThis behavior works correctly in the MultiMatchQueryParser:\n_search?q=(FirstLast%3A\"johnsmith\")^7+(State%3A\"wa\")&explain=true\n...\n\"_explanation\": {\n\n```\n\"value\": 80,\n\"description\": \"sum of:\",\n\"details\": [\n {\n \"value\": 70,\n \"description\": \"weight(FirstLast:johnsmith^7.0 in 0) [PerFieldSimilarity], result of:\",\n```\n\nNote that the boost does work when the request is a _POST as so:\n_search\" -d '{\"query\":{\"term\":{\"State\":{\"value\":\"wa\",\"boost\":7.0}}}}'\n{\"took\":2,\"timed_out\":false,\"_shards\":{\"total\":1,\"successful\":1,\"failed\":0},\"hits\":{\"total\":1,\"max_score\":70.0,\"hits\":[{\"_index\":\"3\",\"_type\":\"people\",\"_id\":\"XXXXXXXXXX\",\"_score\":70.0, \"_source\" : {\"FirstLast\":[\"johnsmith\"],\"State\":[\"wa\"]}}]}}\n\nI'm just getting used to the code and have no debugger setup yet so forgive me if I'm mistaken or incorrectly using this.\n", "comments": [ { "body": "Tracked this down. Looks like the parsed query boost is getting reset to the default of 1.0 on this line:\n\nhttps://github.com/elasticsearch/elasticsearch/blob/master/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java#L217\n\n@kimchy @s1monw Should this line look something like the following?\n\n```\nquery.setBoost(query.getBoost() * qpSettings.boost());\n```\n\nIf yes, I can open a pull request.\n\nThanks,\nMatt Weber\n", "created_at": "2013-05-20T22:05:20Z" }, { "body": "yeah so the reason why this fails is that the query that we boost with the qpSettings is the topLevelQuery. The fix looks good to me. I will add some more tests and pull it in! thanks matt\n", "created_at": "2013-05-21T08:35:51Z" } ], "number": 3024, "title": "MatchQueryParser doesn't allow field boosting on query when included in a _GET request" }
{ "body": "Set the query boost of a parsed query string query to the product of\nthe parsed query boost and the boost value specified in the \"boost\"\nquery string parameter.\n\nfixes #3024\n", "number": 3065, "review_comments": [], "title": "QueryStringQuery overwrites parsed boost value" }
{ "commits": [ { "message": "QueryStringQuery overwrites parsed boost value\n\nSet the query boost of a parsed query string query to the product of\nthe parsed query boost and the boost value specified in the \"boost\"\nquery string parameter.\n\nfixes #3024" } ], "files": [ { "diff": "@@ -214,7 +214,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n if (query == null) {\n return null;\n }\n- query.setBoost(qpSettings.boost());\n+ query.setBoost(query.getBoost() * qpSettings.boost());\n query = optimizeQuery(fixNegativeQueryIfNeeded(query));\n if (query instanceof BooleanQuery) {\n Queries.applyMinimumShouldMatch((BooleanQuery) query, qpSettings.minimumShouldMatch());", "filename": "src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java", "status": "modified" }, { "diff": "@@ -54,6 +54,7 @@\n import org.elasticsearch.index.query.IndexQueryParserModule;\n import org.elasticsearch.index.query.IndexQueryParserService;\n import org.elasticsearch.index.query.ParsedQuery;\n+import org.elasticsearch.index.query.QueryStringQueryBuilder;\n import org.elasticsearch.index.search.NumericRangeFieldDataFilter;\n import org.elasticsearch.index.search.geo.GeoDistanceFilter;\n import org.elasticsearch.index.search.geo.GeoPolygonFilter;\n@@ -160,6 +161,19 @@ public void testQueryString() throws Exception {\n assertThat(termQuery.getTerm(), equalTo(new Term(\"content\", \"test\")));\n }\n \n+ @Test\n+ public void testQueryStringBoostsBuilder() throws Exception {\n+ IndexQueryParserService queryParser = queryParser();\n+ QueryStringQueryBuilder builder = queryString(\"field:boosted^2\");\n+ Query parsedQuery = queryParser.parse(builder).query();\n+ assertThat(parsedQuery, instanceOf(TermQuery.class));\n+ assertThat(((TermQuery) parsedQuery).getTerm(), equalTo(new Term(\"field\", \"boosted\")));\n+ assertThat(parsedQuery.getBoost(), equalTo(2.0f));\n+ builder.boost(2.0f);\n+ parsedQuery = queryParser.parse(builder).query();\n+ assertThat(parsedQuery.getBoost(), equalTo(4.0f));\n+ }\n+ \n @Test\n public void testQueryStringFields1Builder() throws Exception {\n IndexQueryParserService queryParser = queryParser();", "filename": "src/test/java/org/elasticsearch/test/unit/index/query/SimpleIndexQueryParserTests.java", "status": "modified" } ] }
{ "body": "Using [URI reqeust](http://www.elasticsearch.org/guide/reference/api/search/uri-request/) queries, the _track_scores_ parameter doesn't work.\n\nHere's a curl recreation of the problem:\nhttps://gist.github.com/radu-gheorghe/5476348\n\n@clintongormley confirmed and says it works with query body types of queries. Thanks, Clint!\n", "comments": [ { "body": "It's weird, but I'm still having the same problem.\nVersion 0.90.5, installed from .deb Any idea why? \n", "created_at": "2013-10-10T15:11:48Z" }, { "body": "I think this has never been ported to 0.90.x I will look into it!\n", "created_at": "2013-11-12T11:08:01Z" }, { "body": "pushed to 0.90 - this should be in `0.90.7`\n", "created_at": "2013-11-12T11:21:13Z" } ], "number": 2986, "title": "track_scores doesn't work with URI request queries" }
{ "body": "The `track_scores` parameter is now parsed by the REST handler\n\nCloses #2986\n", "number": 3009, "review_comments": [], "title": "Fixed parsing of track_scores in RestSearchAction" }
{ "commits": [ { "message": "Fixed parsing of `track_scores` in `RestSearchAction`\n\nCloses #2986" } ], "files": [ { "diff": "@@ -226,6 +226,13 @@ private SearchSourceBuilder parseSearchSource(RestRequest request) {\n }\n }\n \n+ if(request.hasParam(\"track_scores\")) {\n+ if (searchSourceBuilder == null) {\n+ searchSourceBuilder = new SearchSourceBuilder();\n+ }\n+ searchSourceBuilder.trackScores(request.paramAsBoolean(\"track_scores\", false));\n+ }\n+\n String sSorts = request.param(\"sort\");\n if (sSorts != null) {\n if (searchSourceBuilder == null) {", "filename": "src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java", "status": "modified" }, { "diff": "@@ -167,7 +167,32 @@ public void testScoreSortDirection() throws Exception {\n assertThat(searchResponse.getHits().getAt(1).getId(), equalTo(\"2\"));\n assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(\"1\"));\n }\n- \n+\n+ @Test\n+ public void testIssue2986() {\n+ try {\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+\n+ client.admin().indices().prepareCreate(\"test\").execute().actionGet();\n+\n+ client.prepareIndex(\"test\", \"post\", \"1\").setSource(\"{\\\"field1\\\":\\\"value1\\\"}\").execute().actionGet();\n+ client.prepareIndex(\"test\", \"post\", \"2\").setSource(\"{\\\"field1\\\":\\\"value2\\\"}\").execute().actionGet();\n+ client.prepareIndex(\"test\", \"post\", \"3\").setSource(\"{\\\"field1\\\":\\\"value3\\\"}\").execute().actionGet();\n+\n+ client.admin().indices().prepareRefresh(\"test\").execute().actionGet();\n+\n+ SearchResponse result = client.prepareSearch(\"test\").setQuery(matchAllQuery()).setTrackScores(true).addSort(\"field1\", SortOrder.ASC).execute().actionGet();\n+\n+ for (SearchHit hit : result.getHits()) {\n+ assert !Float.isNaN(hit.getScore());\n+ }\n+\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ }\n+\n @Test\n public void testIssue2991() {\n for (int i = 1; i < 4; i++) {", "filename": "src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java", "status": "modified" } ] }
{ "body": "See the following two scripts to reproduce:\nhttps://gist.github.com/dakrone/5528301\nhttps://gist.github.com/dakrone/5528298\n\nWhen run:\n\n```\n∴ ./broken-get.zsh \n{\"ok\":true,\"acknowledged\":true}{\"ok\":true,\"acknowledged\":true}\n{\"ok\":true,\"_index\":\"get-test\",\"_type\":\"doc\",\"_id\":\"1\",\"_version\":1}\n{\"ok\":true,\"_index\":\"get-test\",\"_type\":\"doc\",\"_id\":\"2\",\"_version\":1}\n{\n \"_index\" : \"get-test\",\n \"_type\" : \"doc\",\n \"_id\" : \"1\",\n \"_version\" : 1,\n \"exists\" : true, \"_source\" : {\"date\":\"2010-01-01\"}\n}\n{\n \"_index\" : \"get-test\",\n \"_type\" : \"doc\",\n \"_id\" : \"2\",\n \"_version\" : 1,\n \"exists\" : true, \"_source\" : {\"date\":[\"2010-01-01\",\"2011-01-01\"]}\n}\n{\n \"error\" : \"MapperParsingException[failed to parse date field [[2010-01-01, 2011-01-01]], tried both date format [yyyy-MM-dd], and timestamp number]; nested: IllegalArgumentException[Invalid format: \\\"[2010-01-01, 2011-01-01]\\\"]; \",\n \"status\" : 400\n}\n```\n\nand:\n\n```\n∴ ./broken-get2.zsh \n{\"ok\":true,\"acknowledged\":true}{\"ok\":true,\"acknowledged\":true}\n{\"ok\":true,\"_index\":\"get-test\",\"_type\":\"doc\",\"_id\":\"1\",\"_version\":1}\n{\"ok\":true,\"_index\":\"get-test\",\"_type\":\"doc\",\"_id\":\"2\",\"_version\":1}\n{\n \"_index\" : \"get-test\",\n \"_type\" : \"doc\",\n \"_id\" : \"1\",\n \"_version\" : 1,\n \"exists\" : true, \"_source\" : {\"num\":2}\n}\n{\n \"_index\" : \"get-test\",\n \"_type\" : \"doc\",\n \"_id\" : \"2\",\n \"_version\" : 1,\n \"exists\" : true, \"_source\" : {\"num\":[2,1]}\n}\n{\n \"error\" : \"NumberFormatException[For input string: \\\"[2, 1]\\\"]\",\n \"status\" : 500\n}\n```\n", "comments": [ { "body": "Forgot to mention, this is on vanilla ES 0.90.0.\n", "created_at": "2013-05-06T21:25:12Z" } ], "number": 3000, "title": "Get doc fails for some array fields" }
{ "body": "Fixes #3000\n", "number": 3002, "review_comments": [], "title": "Fix error getting array fields" }
{ "commits": [ { "message": "Fix error getting array fields\n\nFixes #3000" } ], "files": [ { "diff": "@@ -251,7 +251,14 @@ public GetResult innerGet(String type, String id, String[] gFields, boolean real\n value = searchLookup.source().extractValue(field);\n // normalize the data if needed (mainly for binary fields, to convert from base64 strings to bytes)\n if (value != null && x != null) {\n- value = x.valueForSearch(value);\n+ if (value instanceof List) {\n+ List list = (List) value;\n+ for (int i = 0; i < list.size(); i++) {\n+ list.set(i, x.valueForSearch(list.get(i)));\n+ }\n+ } else {\n+ value = x.valueForSearch(value);\n+ }\n }\n }\n }\n@@ -356,7 +363,14 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[]\n value = searchLookup.source().extractValue(field);\n // normalize the data if needed (mainly for binary fields, to convert from base64 strings to bytes)\n if (value != null && x != null) {\n- value = x.mapper().valueForSearch(value);\n+ if (value instanceof List) {\n+ List list = (List) value;\n+ for (int i = 0; i < list.size(); i++) {\n+ list.set(i, x.mapper().valueForSearch(list.get(i)));\n+ }\n+ } else {\n+ value = x.mapper().valueForSearch(value);\n+ }\n }\n }\n }", "filename": "src/main/java/org/elasticsearch/index/get/ShardGetService.java", "status": "modified" }, { "diff": "@@ -238,7 +238,9 @@ public void getFieldsWithDifferentTypes() throws Exception {\n .startObject(\"_source\").field(\"enabled\", false).endObject()\n .startObject(\"properties\")\n .startObject(\"str\").field(\"type\", \"string\").field(\"store\", \"yes\").endObject()\n+ .startObject(\"strs\").field(\"type\", \"string\").field(\"store\", \"yes\").endObject()\n .startObject(\"int\").field(\"type\", \"integer\").field(\"store\", \"yes\").endObject()\n+ .startObject(\"ints\").field(\"type\", \"integer\").field(\"store\", \"yes\").endObject()\n .startObject(\"date\").field(\"type\", \"date\").field(\"store\", \"yes\").endObject()\n .startObject(\"binary\").field(\"type\", \"binary\").field(\"store\", \"yes\").endObject()\n .endObject()\n@@ -249,42 +251,67 @@ public void getFieldsWithDifferentTypes() throws Exception {\n assertThat(clusterHealth.isTimedOut(), equalTo(false));\n assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN));\n \n- client.prepareIndex(\"test\", \"type1\", \"1\").setSource(\"str\", \"test\", \"int\", 42, \"date\", \"2012-11-13T15:26:14.000Z\", \"binary\", Base64.encodeBytes(new byte[]{1, 2, 3})).execute().actionGet();\n- client.prepareIndex(\"test\", \"type2\", \"1\").setSource(\"str\", \"test\", \"int\", 42, \"date\", \"2012-11-13T15:26:14.000Z\", \"binary\", Base64.encodeBytes(new byte[]{1, 2, 3})).execute().actionGet();\n+ client.prepareIndex(\"test\", \"type1\", \"1\").setSource(\n+ jsonBuilder().startObject()\n+ .field(\"str\", \"test\")\n+ .field(\"strs\", new String[]{\"A\", \"B\", \"C\"})\n+ .field(\"int\", 42)\n+ .field(\"ints\", new int[]{1, 2, 3, 4})\n+ .field(\"date\", \"2012-11-13T15:26:14.000Z\")\n+ .field(\"binary\", Base64.encodeBytes(new byte[]{1, 2, 3}))\n+ .endObject()).execute().actionGet();\n+\n+ client.prepareIndex(\"test\", \"type2\", \"1\").setSource(\n+ jsonBuilder().startObject()\n+ .field(\"str\", \"test\")\n+ .field(\"strs\", new String[]{\"A\", \"B\", \"C\"})\n+ .field(\"int\", 42)\n+ .field(\"ints\", new int[]{1, 2, 3, 4})\n+ .field(\"date\", \"2012-11-13T15:26:14.000Z\")\n+ .field(\"binary\", Base64.encodeBytes(new byte[]{1, 2, 3}))\n+ .endObject()).execute().actionGet();\n \n // realtime get with stored 
source\n logger.info(\"--> realtime get (from source)\");\n- GetResponse getResponse = client.prepareGet(\"test\", \"type1\", \"1\").setFields(\"str\", \"int\", \"date\", \"binary\").execute().actionGet();\n+ GetResponse getResponse = client.prepareGet(\"test\", \"type1\", \"1\").setFields(\"str\", \"strs\", \"int\", \"ints\", \"date\", \"binary\").execute().actionGet();\n assertThat(getResponse.isExists(), equalTo(true));\n assertThat((String) getResponse.getField(\"str\").getValue(), equalTo(\"test\"));\n+ assertThat((List<String>) getResponse.getField(\"strs\").getValue(), contains(\"A\", \"B\", \"C\"));\n assertThat((Long) getResponse.getField(\"int\").getValue(), equalTo(42l));\n+ assertThat((List<Long>) getResponse.getField(\"ints\").getValue(), contains(1L, 2L, 3L, 4L));\n assertThat((String) getResponse.getField(\"date\").getValue(), equalTo(\"2012-11-13T15:26:14.000Z\"));\n assertThat(getResponse.getField(\"binary\").getValue(), instanceOf(String.class)); // its a String..., not binary mapped\n \n logger.info(\"--> realtime get (from stored fields)\");\n- getResponse = client.prepareGet(\"test\", \"type2\", \"1\").setFields(\"str\", \"int\", \"date\", \"binary\").execute().actionGet();\n+ getResponse = client.prepareGet(\"test\", \"type2\", \"1\").setFields(\"str\", \"strs\", \"int\", \"ints\", \"date\", \"binary\").execute().actionGet();\n assertThat(getResponse.isExists(), equalTo(true));\n assertThat((String) getResponse.getField(\"str\").getValue(), equalTo(\"test\"));\n+ assertThat((List<String>) getResponse.getField(\"strs\").getValue(), contains(\"A\", \"B\", \"C\"));\n assertThat((Integer) getResponse.getField(\"int\").getValue(), equalTo(42));\n+ assertThat((List<Integer>) getResponse.getField(\"ints\").getValue(), contains(1, 2, 3, 4));\n assertThat((String) getResponse.getField(\"date\").getValue(), equalTo(\"2012-11-13T15:26:14.000Z\"));\n assertThat((BytesReference) getResponse.getField(\"binary\").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));\n \n logger.info(\"--> flush the index, so we load it from it\");\n client.admin().indices().prepareFlush().execute().actionGet();\n \n logger.info(\"--> non realtime get (from source)\");\n- getResponse = client.prepareGet(\"test\", \"type1\", \"1\").setFields(\"str\", \"int\", \"date\", \"binary\").execute().actionGet();\n+ getResponse = client.prepareGet(\"test\", \"type1\", \"1\").setFields(\"str\", \"strs\", \"int\", \"ints\", \"date\", \"binary\").execute().actionGet();\n assertThat(getResponse.isExists(), equalTo(true));\n assertThat((String) getResponse.getField(\"str\").getValue(), equalTo(\"test\"));\n+ assertThat((List<String>) getResponse.getField(\"strs\").getValue(), contains(\"A\", \"B\", \"C\"));\n assertThat((Long) getResponse.getField(\"int\").getValue(), equalTo(42l));\n+ assertThat((List<Long>) getResponse.getField(\"ints\").getValue(), contains(1L, 2L, 3L, 4L));\n assertThat((String) getResponse.getField(\"date\").getValue(), equalTo(\"2012-11-13T15:26:14.000Z\"));\n assertThat(getResponse.getField(\"binary\").getValue(), instanceOf(String.class)); // its a String..., not binary mapped\n \n logger.info(\"--> non realtime get (from stored fields)\");\n- getResponse = client.prepareGet(\"test\", \"type2\", \"1\").setFields(\"str\", \"int\", \"date\", \"binary\").execute().actionGet();\n+ getResponse = client.prepareGet(\"test\", \"type2\", \"1\").setFields(\"str\", \"strs\", \"int\", \"ints\", \"date\", \"binary\").execute().actionGet();\n assertThat(getResponse.isExists(), 
equalTo(true));\n assertThat((String) getResponse.getField(\"str\").getValue(), equalTo(\"test\"));\n+ assertThat(getResponse.getField(\"strs\").getValues(), contains((Object) \"A\", \"B\", \"C\"));\n assertThat((Integer) getResponse.getField(\"int\").getValue(), equalTo(42));\n+ assertThat(getResponse.getField(\"ints\").getValues(), contains((Object) 1, 2, 3, 4));\n assertThat((String) getResponse.getField(\"date\").getValue(), equalTo(\"2012-11-13T15:26:14.000Z\"));\n assertThat((BytesReference) getResponse.getField(\"binary\").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));\n }", "filename": "src/test/java/org/elasticsearch/test/integration/get/GetActionTests.java", "status": "modified" } ] }
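The change above replaces a single `valueForSearch(value)` call with an element-wise conversion whenever the value extracted from the source is a `List`. Below is a minimal sketch of that normalisation in plain Java; the field mapper is modelled as a plain `Function`, which is an assumption made purely for illustration.

```java
// Minimal sketch: normalise each element of a multi-valued field individually
// instead of handing the whole List to the converter, which would otherwise try
// to parse "[2, 1]" as a single number or date.
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

public class NormalizeFieldValue {

    @SuppressWarnings("unchecked")
    static Object valueForSearch(Object value, Function<Object, Object> mapper) {
        if (value instanceof List) {
            List<Object> list = (List<Object>) value;
            for (int i = 0; i < list.size(); i++) {
                list.set(i, mapper.apply(list.get(i)));  // convert each element
            }
            return list;
        }
        return mapper.apply(value);                      // single values keep the old path
    }

    public static void main(String[] args) {
        Function<Object, Object> toLong = v -> Long.parseLong(v.toString());
        System.out.println(valueForSearch("2", toLong));                              // 2
        System.out.println(valueForSearch(new ArrayList<>(List.of("2", "1")), toLong)); // [2, 1]
        // Pre-fix behaviour effectively called toLong on the whole list and failed with
        // NumberFormatException: For input string: "[2, 1]".
    }
}
```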
{ "body": "today if two nodes have very similar weights but only differ in the number of primaries a relocation can happen due to tie-breaking on the primaries per node. This might happen only if lots of relocations have happened before but still can trigger a unnecessary relocation.\n", "comments": [], "number": 2984, "title": "PrimaryBalance in BalancedShardsAllocator can trigger unneeded relocation" }
{ "body": "This allows dedicated weight calculations per operation. In certain\ncircumstance it is more efficient / required to ignore certain factors in the weight\ncalculation to prevent for instance relocations if they are solely triggered by tie-breakers.\nIn particular the primary balance property should not be taken into account if the delta for\nearly termination is calculated since otherwise a relocation could be triggered solely by the\nfact that two nodes have different amount of primaries allocated to them.\n\nCloses #2984\n", "number": 2985, "review_comments": [], "title": "Introduced a Opertaion enum that is passed to each call of WeightFunction#weight" }
{ "commits": [ { "message": "Introduced a Opertaion enum that is passed to each call of\nWeightFunction#weight to allow dedicated weight calculations per operation. In certain\ncircumstance it is more efficient / required to ignore certain factors in the weight\ncalculation to prevent for instance relocations if they are solely triggered by tie-breakers.\nIn particular the primary balance property should not be taken into account if the delta for\nearly termination is calculated since otherwise a relocation could be triggered solely by the\nfact that two nodes have different amount of primaries allocated to them.\n\nCloses #2984" } ], "files": [ { "diff": "@@ -74,9 +74,9 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards\n class ApplySettings implements NodeSettingsService.Listener {\n @Override\n public void onRefreshSettings(Settings settings) {\n- float indexBalance = settings.getAsFloat(SETTING_INDEX_BALANCE_FACTOR, weightFunction.indexBalance);\n- float shardBalance = settings.getAsFloat(SETTING_SHARD_BALANCE_FACTOR, weightFunction.shardBalance);\n- float primaryBalance = settings.getAsFloat(SETTING_PRIMARY_BALANCE_FACTOR, weightFunction.primaryBalance);\n+ final float indexBalance = settings.getAsFloat(SETTING_INDEX_BALANCE_FACTOR, weightFunction.indexBalance);\n+ final float shardBalance = settings.getAsFloat(SETTING_SHARD_BALANCE_FACTOR, weightFunction.shardBalance);\n+ final float primaryBalance = settings.getAsFloat(SETTING_PRIMARY_BALANCE_FACTOR, weightFunction.primaryBalance);\n float threshold = settings.getAsFloat(SETTING_THRESHOLD, BalancedShardsAllocator.this.threshold);\n if (threshold <= 0.0f) {\n throw new ElasticSearchIllegalArgumentException(\"threshold must be greater than 0.0f but was: \" + threshold);\n@@ -87,7 +87,9 @@ public void onRefreshSettings(Settings settings) {\n }\n \n private volatile WeightFunction weightFunction = new WeightFunction(DEFAULT_INDEX_BALANCE_FACTOR, DEFAULT_SHARD_BALANCE_FACTOR, DEFAULT_PRIMARY_BALANCE_FACTOR);\n+\n private volatile float threshold = 1.0f;\n+ \n \n public BalancedShardsAllocator(Settings settings) {\n this(settings, new NodeSettingsService(settings));\n@@ -185,25 +187,72 @@ public static class WeightFunction {\n private final float indexBalance;\n private final float shardBalance;\n private final float primaryBalance;\n+ private final EnumMap<Operation, float[]> thetaMap = new EnumMap<BalancedShardsAllocator.Operation, float[]>(Operation.class);\n \n public WeightFunction(float indexBalance, float shardBalance, float primaryBalance) {\n- final float sum = indexBalance + shardBalance + primaryBalance;\n+ float sum = indexBalance + shardBalance + primaryBalance;\n if (sum <= 0.0f) {\n throw new ElasticSearchIllegalArgumentException(\"Balance factors must sum to a value > 0 but was: \" + sum);\n }\n- this.indexBalance = indexBalance / sum;\n- this.shardBalance = shardBalance / sum;\n- this.primaryBalance = primaryBalance / sum;\n+ final float[] defaultTheta = new float[] { shardBalance / sum, indexBalance / sum, primaryBalance / sum };\n+ for(Operation operation : Operation.values()) {\n+ switch(operation) {\n+ case THRESHOLD_CHECK:\n+ sum = indexBalance + shardBalance;\n+ if (sum <= 0.0f) {\n+ thetaMap.put(operation, defaultTheta);\n+ }\n+ thetaMap.put(operation, new float[] { shardBalance / sum, indexBalance / sum, 0});\n+ break;\n+ case BALANCE:\n+ case ALLOCATE:\n+ case MOVE:\n+ thetaMap.put(operation, defaultTheta);\n+ break;\n+ default:\n+ assert false;\n+ }\n+ }\n+ this.indexBalance = 
indexBalance;\n+ this.shardBalance = shardBalance;\n+ this.primaryBalance = primaryBalance;\n }\n \n- public float weight(Balancer balancer, ModelNode node, String index) {\n- final float weightShard = shardBalance * (node.numShards() - balancer.avgShardsPerNode());\n- final float weightIndex = indexBalance * (node.numShards(index) - balancer.avgShardsPerNode(index));\n- final float weightPrimary = primaryBalance * (node.numPrimaries() - balancer.avgPrimariesPerNode());\n- return weightShard + weightIndex + weightPrimary;\n+ public float weight(Operation operation, Balancer balancer, ModelNode node, String index) {\n+ final float weightShard = (node.numShards() - balancer.avgShardsPerNode());\n+ final float weightIndex = (node.numShards(index) - balancer.avgShardsPerNode(index));\n+ final float weightPrimary = (node.numPrimaries() - balancer.avgPrimariesPerNode());\n+ final float[] theta = thetaMap.get(operation);\n+ assert theta != null;\n+ return theta[0] * weightShard + theta[1] * weightIndex + theta[2] * weightPrimary; \n }\n \n }\n+ \n+ /**\n+ * An enum that donates the actual operation the {@link WeightFunction} is\n+ * applied to.\n+ */\n+ public static enum Operation {\n+ /**\n+ * Provided during balance operations. \n+ */\n+ BALANCE,\n+ /**\n+ * Provided during initial allocation operation for unassigned shards. \n+ */\n+ ALLOCATE,\n+ /**\n+ * Provided during move operation.\n+ */\n+ MOVE, \n+ /**\n+ * Provided when the weight delta is checked against the configured threshold.\n+ * This can be used to ignore tie-breaking weight factors that should not \n+ * solely trigger a relocation unless the delta is above the threshold. \n+ */\n+ THRESHOLD_CHECK\n+ }\n \n /**\n * A {@link Balancer}\n@@ -218,14 +267,15 @@ public static class Balancer {\n \n private final float threshold;\n private final MetaData metaData;\n-\n+ \n private final Predicate<MutableShardRouting> assignedFilter = new Predicate<MutableShardRouting>() {\n @Override\n public boolean apply(MutableShardRouting input) {\n return input.assignedToNode();\n }\n };\n \n+\n public Balancer(ESLogger logger, RoutingAllocation allocation, WeightFunction weight, float threshold) {\n this.logger = logger;\n this.allocation = allocation;\n@@ -325,8 +375,8 @@ public boolean balance() {\n boolean changed = initialize(allocation.routingNodes());\n NodeSorter sorter = newNodeSorter();\n if (nodes.size() > 1) { /* skip if we only have one node */\n- for (String index : buildWeightOrderedIndidces(sorter)) {\n- sorter.reset(index);\n+ for (String index : buildWeightOrderedIndidces(Operation.BALANCE, sorter)) {\n+ sorter.reset(Operation.BALANCE,index);\n final float[] weights = sorter.weights;\n final ModelNode[] modelNodes = sorter.modelNodes;\n int lowIdx = 0;\n@@ -336,8 +386,10 @@ public boolean balance() {\n final ModelNode maxNode = modelNodes[highIdx];\n if (maxNode.numShards(index) > 0) {\n float delta = weights[highIdx] - weights[lowIdx];\n+ delta = delta <= threshold ? 
delta : sorter.weight(Operation.THRESHOLD_CHECK, maxNode) - sorter.weight(Operation.THRESHOLD_CHECK, minNode);\n if (delta <= threshold) {\n if (logger.isTraceEnabled()) {\n+ \n logger.trace(\"Stop balancing index [{}] min_node [{}] weight: [{}] max_node [{}] weight: [{}] delta: [{}]\",\n index, maxNode.getNodeId(), weights[highIdx], minNode.getNodeId(), weights[lowIdx], delta);\n }\n@@ -349,14 +401,14 @@ public boolean balance() {\n }\n /* pass the delta to the replication function to prevent relocations that only swap the weights of the two nodes.\n * a relocation must bring us closer to the balance if we only achive the same delta the relocation is useless */\n- if (tryRelocateShard(minNode, maxNode, index, delta)) {\n+ if (tryRelocateShard(Operation.BALANCE, minNode, maxNode, index, delta)) {\n /*\n * TODO we could be a bit smarter here, we don't need to fully sort necessarily\n * we could just find the place to insert linearly but the win might be minor\n * compared to the added complexity\n */\n- weights[lowIdx] = sorter.weight(modelNodes[lowIdx]);\n- weights[highIdx] = sorter.weight(modelNodes[highIdx]);\n+ weights[lowIdx] = sorter.weight(Operation.BALANCE, modelNodes[lowIdx]);\n+ weights[highIdx] = sorter.weight(Operation.BALANCE, modelNodes[highIdx]);\n sorter.quickSort(0, weights.length - 1);\n lowIdx = 0;\n highIdx = weights.length - 1;\n@@ -397,11 +449,11 @@ public boolean balance() {\n * average. To re-balance we need to move shards back eventually likely\n * to the nodes we relocated them from.\n */\n- private String[] buildWeightOrderedIndidces(NodeSorter sorter) {\n+ private String[] buildWeightOrderedIndidces(Operation operation, NodeSorter sorter) {\n final String[] indices = this.indices.toArray(new String[this.indices.size()]);\n final float[] deltas = new float[indices.length];\n for (int i = 0; i < deltas.length; i++) {\n- sorter.reset(indices[i]);\n+ sorter.reset(operation, indices[i]);\n deltas[i] = sorter.delta();\n }\n new SorterTemplate() {\n@@ -459,7 +511,7 @@ public boolean move(MutableShardRouting shard, RoutingNode node) {\n final ModelNode sourceNode = nodes.get(node.nodeId());\n assert sourceNode != null;\n final NodeSorter sorter = newNodeSorter();\n- sorter.reset(shard.getIndex());\n+ sorter.reset(Operation.MOVE, shard.getIndex());\n final ModelNode[] nodes = sorter.modelNodes;\n assert sourceNode.containsShard(shard);\n /*\n@@ -577,7 +629,7 @@ public int compare(MutableShardRouting o1,\n \t */\n if (!node.containsShard(shard)) {\n node.addShard(shard, Decision.ALWAYS);\n- float currentWeight = weight.weight(this, node, shard.index());\n+ float currentWeight = weight.weight(Operation.ALLOCATE, this, node, shard.index());\n \t /*\n \t * Remove the shard from the node again this is only a\n \t * simulation\n@@ -660,7 +712,7 @@ public int compare(MutableShardRouting o1,\n * balance model. 
Iff this method returns a <code>true</code> the relocation has already been executed on the\n * simulation model as well as on the cluster.\n */\n- private boolean tryRelocateShard(ModelNode minNode, ModelNode maxNode, String idx, float minCost) {\n+ private boolean tryRelocateShard(Operation operation, ModelNode minNode, ModelNode maxNode, String idx, float minCost) {\n final ModelIndex index = maxNode.getIndex(idx);\n if (index != null) {\n if (logger.isTraceEnabled()) {\n@@ -684,7 +736,7 @@ private boolean tryRelocateShard(ModelNode minNode, ModelNode maxNode, String id\n Decision srcDecision;\n if ((srcDecision = maxNode.removeShard(shard)) != null) {\n minNode.addShard(shard, srcDecision);\n- final float delta = weight.weight(this, minNode, idx) - weight.weight(this, maxNode, idx);\n+ final float delta = weight.weight(operation, this, minNode, idx) - weight.weight(operation, this, maxNode, idx);\n if (delta < minCost) {\n minCost = delta;\n candidate = shard;\n@@ -920,7 +972,6 @@ static final class NodeSorter extends SorterTemplate {\n private float pivotWeight;\n \n public NodeSorter(ModelNode[] modelNodes, WeightFunction function, Balancer balancer) {\n-\n this.function = function;\n this.balancer = balancer;\n this.modelNodes = modelNodes;\n@@ -931,16 +982,16 @@ public NodeSorter(ModelNode[] modelNodes, WeightFunction function, Balancer bala\n * Resets the sorter, recalculates the weights per node and sorts the\n * nodes by weight, with minimal weight first.\n */\n- public void reset(String index) {\n+ public void reset(Operation operation, String index) {\n this.index = index;\n for (int i = 0; i < weights.length; i++) {\n- weights[i] = weight(modelNodes[i]);\n+ weights[i] = weight(operation, modelNodes[i]);\n }\n quickSort(0, modelNodes.length - 1);\n }\n \n- public float weight(ModelNode node) {\n- return function.weight(balancer, node, index);\n+ public float weight(Operation operation, ModelNode node) {\n+ return function.weight(operation, balancer, node, index);\n }\n \n @Override", "filename": "src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java", "status": "modified" }, { "diff": "@@ -30,22 +30,33 @@\n import static org.elasticsearch.test.unit.cluster.routing.allocation.RoutingAllocationTests.newNode;\n import static org.hamcrest.MatcherAssert.assertThat;\n \n+import java.util.List;\n+\n import org.elasticsearch.cluster.ClusterState;\n import org.elasticsearch.cluster.metadata.IndexMetaData;\n import org.elasticsearch.cluster.metadata.MetaData;\n+import org.elasticsearch.cluster.node.DiscoveryNode;\n import org.elasticsearch.cluster.node.DiscoveryNodes;\n+import org.elasticsearch.cluster.routing.MutableShardRouting;\n import org.elasticsearch.cluster.routing.RoutingNode;\n import org.elasticsearch.cluster.routing.RoutingNodes;\n import org.elasticsearch.cluster.routing.RoutingTable;\n import org.elasticsearch.cluster.routing.ShardRouting;\n+import org.elasticsearch.cluster.routing.ShardRoutingState;\n import org.elasticsearch.cluster.routing.allocation.AllocationService;\n+import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;\n+import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;\n+import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation;\n import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;\n+import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;\n+import 
org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators;\n+import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;\n import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;\n import org.elasticsearch.common.logging.ESLogger;\n import org.elasticsearch.common.logging.Loggers;\n import org.elasticsearch.common.settings.ImmutableSettings;\n+import org.elasticsearch.gateway.none.NoneGatewayAllocator;\n import org.elasticsearch.node.settings.NodeSettingsService;\n-import org.elasticsearch.node.settings.NodeSettingsService.Listener;\n import org.hamcrest.Matchers;\n import org.testng.annotations.Test;\n \n@@ -353,4 +364,163 @@ public void addListener(Listener listener) {\n assertThat(allocator.getThreshold(), Matchers.equalTo(3.0f));\n }\n \n+ @Test\n+ public void testNoRebalanceOnPrimaryOverload() {\n+\n+ ImmutableSettings.Builder settings = settingsBuilder();\n+ AllocationService strategy = new AllocationService(settings.build(), new AllocationDeciders(settings.build(),\n+ new NodeSettingsService(ImmutableSettings.Builder.EMPTY_SETTINGS)), new ShardsAllocators(settings.build(),\n+ new NoneGatewayAllocator(), new ShardsAllocator() {\n+\n+ @Override\n+ public boolean rebalance(RoutingAllocation allocation) {\n+ return false;\n+ }\n+\n+ @Override\n+ public boolean move(MutableShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {\n+ return false;\n+ }\n+\n+ @Override\n+ public void applyStartedShards(StartedRerouteAllocation allocation) {\n+ \n+ \n+ }\n+\n+ @Override\n+ public void applyFailedShards(FailedRerouteAllocation allocation) {\n+ }\n+ \n+ /*\n+ * // this allocator tries to rebuild this scenario where a rebalance is\n+ * // triggered solely by the primary overload on node [1] where a shard\n+ * // is rebalanced to node 0 \n+ routing_nodes:\n+ -----node_id[0][V]\n+ --------[test][0], node[0], [R], s[STARTED]\n+ --------[test][4], node[0], [R], s[STARTED]\n+ -----node_id[1][V]\n+ --------[test][0], node[1], [P], s[STARTED]\n+ --------[test][1], node[1], [P], s[STARTED]\n+ --------[test][3], node[1], [R], s[STARTED]\n+ -----node_id[2][V]\n+ --------[test][1], node[2], [R], s[STARTED]\n+ --------[test][2], node[2], [R], s[STARTED]\n+ --------[test][4], node[2], [P], s[STARTED]\n+ -----node_id[3][V]\n+ --------[test][2], node[3], [P], s[STARTED]\n+ --------[test][3], node[3], [P], s[STARTED]\n+ ---- unassigned\n+ */\n+ @Override\n+ public boolean allocateUnassigned(RoutingAllocation allocation) {\n+ List<MutableShardRouting> unassigned = allocation.routingNodes().unassigned();\n+ boolean changed = !unassigned.isEmpty();\n+ for (MutableShardRouting sr : unassigned) {\n+ switch (sr.id()) {\n+ case 0:\n+ if (sr.primary()) {\n+ allocation.routingNodes().node(\"node1\").add(sr);\n+ } else {\n+ allocation.routingNodes().node(\"node0\").add(sr);\n+ }\n+ break;\n+ case 1:\n+ if (sr.primary()) {\n+ allocation.routingNodes().node(\"node1\").add(sr);\n+ } else {\n+ allocation.routingNodes().node(\"node2\").add(sr);\n+ }\n+ break;\n+ case 2:\n+ if (sr.primary()) {\n+ allocation.routingNodes().node(\"node3\").add(sr);\n+ } else {\n+ allocation.routingNodes().node(\"node2\").add(sr);\n+ }\n+ break;\n+ case 3:\n+ if (sr.primary()) {\n+ allocation.routingNodes().node(\"node3\").add(sr);\n+ } else {\n+ allocation.routingNodes().node(\"node1\").add(sr);\n+ }\n+ break;\n+ case 4:\n+ if (sr.primary()) {\n+ allocation.routingNodes().node(\"node2\").add(sr);\n+ } else {\n+ 
allocation.routingNodes().node(\"node0\").add(sr);\n+ }\n+ break;\n+ }\n+\n+ }\n+ unassigned.clear();\n+ return changed;\n+ }\n+ }));\n+ MetaData.Builder metaDataBuilder = newMetaDataBuilder();\n+ RoutingTable.Builder routingTableBuilder = routingTable();\n+ IndexMetaData.Builder indexMeta = newIndexMetaDataBuilder(\"test\").numberOfShards(5).numberOfReplicas(1);\n+ metaDataBuilder = metaDataBuilder.put(indexMeta);\n+ MetaData metaData = metaDataBuilder.build();\n+ for (IndexMetaData index : metaData.indices().values()) {\n+ routingTableBuilder.addAsNew(index);\n+ }\n+ RoutingTable routingTable = routingTableBuilder.build();\n+ DiscoveryNodes.Builder nodes = newNodesBuilder();\n+ for (int i = 0; i < 4; i++) {\n+ DiscoveryNode node = newNode(\"node\"+i);\n+ nodes.put(node);\n+ }\n+ \n+ ClusterState clusterState = newClusterStateBuilder().nodes(nodes).metaData(metaData).routingTable(routingTable).build();\n+ routingTable = strategy.reroute(clusterState).routingTable();\n+ clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();\n+ RoutingNodes routingNodes = clusterState.routingNodes();\n+ \n+ for (RoutingNode routingNode : routingNodes) {\n+ for (MutableShardRouting mutableShardRouting : routingNode) {\n+ assertThat(mutableShardRouting.state(), Matchers.equalTo(ShardRoutingState.INITIALIZING));\n+ }\n+ }\n+ strategy = new AllocationService(settings.build());\n+\n+ logger.info(\"use the new allocator and check if it moves shards\");\n+ routingNodes = clusterState.routingNodes();\n+ routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();\n+ clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();\n+ routingNodes = clusterState.routingNodes();\n+ for (RoutingNode routingNode : routingNodes) {\n+ for (MutableShardRouting mutableShardRouting : routingNode) {\n+ assertThat(mutableShardRouting.state(), Matchers.equalTo(ShardRoutingState.STARTED));\n+ }\n+ }\n+ \n+ logger.info(\"start the replica shards\");\n+ routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();\n+ clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();\n+ routingNodes = clusterState.routingNodes();\n+ \n+ for (RoutingNode routingNode : routingNodes) {\n+ for (MutableShardRouting mutableShardRouting : routingNode) {\n+ assertThat(mutableShardRouting.state(), Matchers.equalTo(ShardRoutingState.STARTED));\n+ }\n+ }\n+\n+ logger.info(\"rebalancing\");\n+ routingTable = strategy.reroute(clusterState).routingTable();\n+ clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();\n+ routingNodes = clusterState.routingNodes();\n+ \n+ for (RoutingNode routingNode : routingNodes) {\n+ for (MutableShardRouting mutableShardRouting : routingNode) {\n+ assertThat(mutableShardRouting.state(), Matchers.equalTo(ShardRoutingState.STARTED));\n+ }\n+ }\n+ \n+ }\n+ \n }\n\\ No newline at end of file", "filename": "src/test/java/org/elasticsearch/test/unit/cluster/routing/allocation/BalanceConfigurationTests.java", "status": "modified" } ] }
{ "body": "ES version is 0.90.0 (from deb package). Count of products in test - 667 (333 in stock and 334 out of stock). 50 products was added less than a month ago (37 in stock and 13 out of stock). One product (in stock) have a \"Hot Offer\" label.\n\nSo, my first query (search_type = count):\n\n``` JSON\n{\n \"query\": {\n \"match_all\": {}\n },\n \"filter\": {\n \"bool\": {\n \"should\": [\n {\"term\": {\"hotOffer\": true}},\n {\"range\": {\"created\": {\"from\": \"2013-04-02\"}}}\n ]\n }\n }\n}\n```\n\nThe result is 51 hits. It's correct result.\nNow add a <code>'must'</code> clause to <code>'bool'</code> filter:\n\n``` JSON\n{\n \"query\": {\n \"match_all\": {}\n },\n \"filter\": {\n \"bool\": {\n \"must\": {\n \"term\": {\"inStock\": true}\n },\n \"should\": [\n {\"term\": {\"hotOffer\": true}},\n {\"range\": {\"created\": {\"from\": \"2013-04-02\"}}}\n ]\n }\n }\n}\n```\n\nThe result is 332 hits. It's wrong result (should be 38).\nRemove <code>'range'</code> filter from <code>'should'</code> clause:\n\n``` JSON\n{\n \"query\": {\n \"match_all\": {}\n },\n \"filter\": {\n \"bool\": {\n \"must\": {\n \"term\": {\"inStock\": true}\n },\n \"should\": [\n {\"term\": {\"hotOffer\": true}}\n ]\n }\n }\n}\n```\n\nThe result is 319 hits. It's wrong result, again (should be 1).\nIf a <code>'bool'</code> filter replace by a combination of <code>'and'</code> and <code>'or'</code> filters then all is OK.\n\n``` JSON\n{\n \"query\": {\n \"match_all\": {}\n },\n \"filter\": {\n \"and\": [\n {\"term\": {\"inStock\": true}},\n {\n \"or\": [\n {\"term\": {\"hotOffer\": true}},\n {\"range\": {\"created\": {\"from\": \"2013-04-02\"}}}\n ]\n }\n ]\n }\n}\n```\n\nThe result is 38 hits.\n\nIt seems that if both clause (<code>must</code> and <code>should</code>) are in <code>bool</code> filter then results are incorrect. In 0.20.6 all works fine. \n", "comments": [ { "body": "This is a bug. I will fix it soon.\n", "created_at": "2013-05-03T09:52:46Z" } ], "number": 2979, "title": "Query DSL: Wrong result on bool filter with 'must' and 'should' clauses" }
{ "body": "PR for #2979\n", "number": 2981, "review_comments": [], "title": "PR for #2979" }
{ "commits": [ { "message": "Fixed issue where 'fast' should filter can make documents that didn't match the must or must_not clause a match again.\nRelates to #2979" } ], "files": [ { "diff": "@@ -31,9 +31,7 @@\n import org.elasticsearch.common.lucene.docset.NotDocIdSet;\n \n import java.io.IOException;\n-import java.util.ArrayList;\n-import java.util.Iterator;\n-import java.util.List;\n+import java.util.*;\n \n /**\n * Similar to {@link org.apache.lucene.queries.BooleanFilter}.\n@@ -80,6 +78,7 @@ public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws\n boolean hasNonEmptyShouldClause = false;\n boolean hasMustClauses = false;\n boolean hasMustNotClauses = false;\n+ boolean mustOrMustNotBeforeShould = false;\n for (int i = 0; i < clauses.size(); i++) {\n FilterClause clause = clauses.get(i);\n DocIdSet set = clause.getFilter().getDocIdSet(context, acceptDocs);\n@@ -89,6 +88,9 @@ public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws\n return null;\n }\n } else if (clause.getOccur() == Occur.SHOULD) {\n+ if (!hasShouldClauses && (hasMustClauses || hasMustNotClauses)) {\n+ mustOrMustNotBeforeShould = true;\n+ }\n hasShouldClauses = true;\n if (DocIdSets.isEmpty(set)) {\n continue;\n@@ -113,6 +115,32 @@ public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws\n return null;\n }\n \n+ if (mustOrMustNotBeforeShould) {\n+ // Sort the clause only once if we encounter a should before a must or must_not clause\n+ Collections.sort(clauses, new Comparator<FilterClause>() {\n+ @Override\n+ public int compare(FilterClause o1, FilterClause o2) {\n+ if (o1.getOccur() != o2.getOccur()) {\n+ return o1.getOccur() == Occur.SHOULD ? -1 : 1;\n+ } else {\n+ return 0;\n+ }\n+ }\n+ });\n+\n+ // Because we sorted the clause we also need to sort the result clauses\n+ Collections.sort(results, new Comparator<ResultClause>() {\n+ @Override\n+ public int compare(ResultClause o1, ResultClause o2) {\n+ if (o1.clause.getOccur() != o2.clause.getOccur()) {\n+ return o1.clause.getOccur() == Occur.SHOULD ? 
-1 : 1;\n+ } else {\n+ return 0;\n+ }\n+ }\n+ });\n+ }\n+\n // now, go over the clauses and apply the \"fast\" ones...\n hasNonEmptyShouldClause = false;\n boolean hasBits = false;", "filename": "src/main/java/org/elasticsearch/common/lucene/search/XBooleanFilter.java", "status": "modified" }, { "diff": "@@ -20,7 +20,6 @@\n package org.elasticsearch.test.integration.search.query;\n \n import org.elasticsearch.ElasticSearchException;\n-import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;\n import org.elasticsearch.action.search.SearchPhaseExecutionException;\n import org.elasticsearch.action.search.SearchResponse;\n import org.elasticsearch.action.search.SearchType;\n@@ -46,7 +45,6 @@\n import static org.elasticsearch.index.query.QueryBuilders.*;\n import static org.hamcrest.MatcherAssert.assertThat;\n import static org.hamcrest.Matchers.anyOf;\n-import static org.hamcrest.Matchers.greaterThan;\n import static org.hamcrest.Matchers.equalTo;\n import static org.testng.Assert.assertTrue;\n import static org.testng.Assert.fail;\n@@ -1203,21 +1201,25 @@ public void testNumericRangeFilter_2826() throws Exception {\n .execute().actionGet();\n \n client.prepareIndex(\"test\", \"type1\", \"1\").setSource(jsonBuilder().startObject()\n+ .field(\"field1\", \"test1\")\n .field(\"num_long\", 1)\n .endObject())\n .execute().actionGet();\n \n client.prepareIndex(\"test\", \"type1\", \"2\").setSource(jsonBuilder().startObject()\n+ .field(\"field1\", \"test1\")\n .field(\"num_long\", 2)\n .endObject())\n .execute().actionGet();\n \n client.prepareIndex(\"test\", \"type1\", \"3\").setSource(jsonBuilder().startObject()\n+ .field(\"field1\", \"test2\")\n .field(\"num_long\", 3)\n .endObject())\n .execute().actionGet();\n \n client.prepareIndex(\"test\", \"type1\", \"4\").setSource(jsonBuilder().startObject()\n+ .field(\"field1\", \"test2\")\n .field(\"num_long\", 4)\n .endObject())\n .execute().actionGet();\n@@ -1238,6 +1240,16 @@ public void testNumericRangeFilter_2826() throws Exception {\n ).execute().actionGet();\n \n assertThat(response.getHits().totalHits(), equalTo(4l));\n+\n+ // This made #2979 fail!\n+ response = client.prepareSearch(\"test\").setFilter(\n+ FilterBuilders.boolFilter()\n+ .must(FilterBuilders.termFilter(\"field1\", \"test1\"))\n+ .should(FilterBuilders.rangeFilter(\"num_long\").from(1).to(2))\n+ .should(FilterBuilders.rangeFilter(\"num_long\").from(3).to(4))\n+ ).execute().actionGet();\n+\n+ assertThat(response.getHits().totalHits(), equalTo(2l));\n }\n \n @Test // see #2926", "filename": "src/test/java/org/elasticsearch/test/integration/search/query/SimpleQueryTests.java", "status": "modified" } ] }
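The fix above reorders the clauses so that `should` sets are applied before `must`/`must_not` sets in the fast bitset phase. The following is a deliberately simplified model, using `java.util.BitSet`, of why that ordering matters; the real `XBooleanFilter` works on per-segment `DocIdSet`s and is considerably more involved.

```java
// Simplified model: applying a SHOULD set after the MUST set has already been
// intersected can re-add documents the MUST clause excluded; applying SHOULD
// first (as the sorted clauses do) keeps the result correct.
import java.util.BitSet;

public class BoolFilterOrder {

    static BitSet bits(int size, int... docs) {
        BitSet b = new BitSet(size);
        for (int d : docs) b.set(d);
        return b;
    }

    public static void main(String[] args) {
        int size = 4;
        BitSet must = bits(size, 0, 1);       // e.g. inStock:true        -> docs 0,1
        BitSet should = bits(size, 1, 2, 3);  // e.g. hotOffer OR created -> docs 1,2,3

        // Buggy order: intersect MUST first, then OR the SHOULD set on top.
        BitSet buggy = bits(size, 0, 1, 2, 3);
        buggy.and(must);
        buggy.or(should);                      // docs 2,3 come back although they fail MUST
        System.out.println(buggy);             // {0, 1, 2, 3}

        // Fixed order: apply SHOULD first, then intersect with MUST.
        BitSet fixed = bits(size, 0, 1, 2, 3);
        fixed.and(should);
        fixed.and(must);
        System.out.println(fixed);             // {1} -- only documents matching both
    }
}
```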
{ "body": "ES Version: 0.90.0.RC2\n\nSteps to reproduce:\n\nok case, without source exclusion:\n\n``` bash\ncurl -XDELETE 'http://localhost:9200/geo_test'\ncurl -XPUT 'http://localhost:9200/geo_test'\ncurl -XPUT 'http://localhost:9200/geo_test/item/_mapping' -d '{\n \"item\" : {\n \"properties\" : {\n \"location\" : {\n \"type\" : \"object\",\n \"properties\": {\n \"point\": {\"type\": \"geo_point\"},\n \"area\": {\"type\": \"geo_shape\"}\n }\n }\n }\n }\n}'\ncurl -XPUT 'http://localhost:9200/geo_test/item/1' -d '{\n \"location\": {\"point\": [45.0, 45.0]}\n}'\ncurl -XPUT 'http://localhost:9200/geo_test/item/2' -d '{\n \"location\": {\n \"area\": {\n \"type\" : \"envelope\",\n \"coordinates\" : [[44.0, 46.0], [45.0, 45.0]]\n }\n }\n}'\ncurl -XPOST 'http://localhost:9200/geo_test/item/_search?pretty' -d '{\n \"query\": {\"match_all\": {}}\n}'\n```\n\nreturns the coordinates for geo_point and geo_shape items:\n\n``` JSON\n{\n \"took\": 1,\n \"timed_out\": false,\n \"_shards\": {\n \"total\": 1,\n \"successful\": 1,\n \"failed\": 0\n },\n \"hits\": {\n \"total\": 2,\n \"max_score\": 1.0,\n \"hits\": [{\n \"_index\": \"geo_test\",\n \"_type\": \"item\",\n \"_id\": \"1\",\n \"_score\": 1.0,\n \"_source\": {\n \"location\": {\n \"point\": [45.0, 45.0]\n }\n }\n }, {\n \"_index\": \"geo_test\",\n \"_type\": \"item\",\n \"_id\": \"2\",\n \"_score\": 1.0,\n \"_source\": {\n \"location\": {\n \"area\": {\n \"type\": \"envelope\",\n \"coordinates\": [\n [45.0, 45.0],\n [44.0, 46.0]\n ]\n }\n }\n }\n }]\n }\n}\n```\n\nhowever the same scenario but with a source exclusion mapping of an arbitrary field does not return the geo_shape coordinates any longer:\n\n``` bash\ncurl -XDELETE 'http://localhost:9200/geo_test'\ncurl -XPUT 'http://localhost:9200/geo_test'\ncurl -XPUT 'http://localhost:9200/geo_test/item/_mapping' -d '{\n \"item\": {\n \"_source\": {\n \"excludes\": [\"body\"]\n },\n \"properties\" : {\n \"location\" : {\n \"type\" : \"object\",\n \"properties\": {\n \"point\": {\"type\": \"geo_point\"},\n \"area\": {\"type\": \"geo_shape\"}\n }\n }\n }\n }\n}'\ncurl -XPUT 'http://localhost:9200/geo_test/item/1' -d '{\n \"location\": {\"point\": [45.0, 45.0]}\n}'\ncurl -XPUT 'http://localhost:9200/geo_test/item/2' -d '{\n \"location\": {\n \"area\": {\n \"type\" : \"envelope\",\n \"coordinates\" : [[44.0, 46.0], [45.0, 45.0]]\n }\n }\n}'\ncurl -XPOST 'http://localhost:9200/geo_test/item/_search?pretty' -d '{\n \"query\": {\"match_all\": {}}\n}'\n```\n\nreturns only the geo_point coordinates but not the geo_shape coordinates:\n\n``` JSON\n{\n \"took\": 1,\n \"timed_out\": false,\n \"_shards\": {\n \"total\": 1,\n \"successful\": 1,\n \"failed\": 0\n },\n \"hits\": {\n \"total\": 2,\n \"max_score\": 1.0,\n \"hits\": [{\n \"_index\": \"geo_test\",\n \"_type\": \"item\",\n \"_id\": \"1\",\n \"_score\": 1.0,\n \"_source\": {\n \"location\": {\n \"point\": [45.0, 45.0]\n }\n }\n }, {\n \"_index\": \"geo_test\",\n \"_type\": \"item\",\n \"_id\": \"2\",\n \"_score\": 1.0,\n \"_source\": {\n \"location\": {\n \"area\": {\n \"type\": \"envelope\",\n \"coordinates\": []\n }\n }\n }\n }]\n }\n}\n```\n", "comments": [ { "body": "Hey,\n\nthis looks really strange.. 
I managed to reproduce it with a smaller example and will try to take a look at it\n\n```\ncurl -XDELETE 'http://localhost:9200/geo_test'\ncurl -XPUT 'http://localhost:9200/geo_test'\ncurl -XPUT 'http://localhost:9200/geo_test/item/_mapping' -d '{\n \"item\": {\n \"_source\": { \"excludes\": [\"body\"] },\n \"properties\" : {\n \"area\": {\"type\": \"geo_shape\"}\n }\n }\n}'\n\ncurl -XPUT 'http://localhost:9200/geo_test/item/1?refresh=true' -d '{\n \"area\": {\n \"type\" : \"envelope\",\n \"coordinates\" : [[-45.0, 45.0], [45.0, -45.0]]\n }\n}'\n\ncurl -XPOST 'http://localhost:9200/geo_test/item/_search?pretty' -d '{\n \"query\": {\"match_all\": {}}\n}'\n\ncurl http://localhost:9200/geo_test/item/1\n```\n\nAs soon as the source exclude is omitted in the mapping, everything is working again. The GET on the id also works as expected.\n", "created_at": "2013-04-30T12:36:57Z" }, { "body": "Hi, I was just trying this in ES 0.90.1 and I still get the same error as reported above. Could you please reopen this issues and doublecheck again? Thanks a lot.\n", "created_at": "2013-06-03T13:28:05Z" }, { "body": "@fxh hey, will reopen it and recheck as soon as possible\n", "created_at": "2013-06-03T15:52:09Z" }, { "body": "@fxh we did not include the fix in the 0.90 branch, even though I thought so. I have just pushed it into the 0.90 release branch so it will be included in the next release. Sorry for the inconvenience and thanks for bringing it up again!\n", "created_at": "2013-06-05T10:04:21Z" } ], "number": 2944, "title": "source exclusion mapping prevents geo shape coordinates to be returned in query result source field" }
{ "body": "The filter method of XContentMapValues actually filtered out nested\narrays/lists completely due to a bug in the filter method, which threw\naway all data inside of such an nested array.\n\nCloses #2944\nThis bug was a follow up problem, because of the filtering of nested arrays\nin case source exclusion was configured.\n", "number": 2955, "review_comments": [], "title": "XContentMapValues.filter now works with nested arrays" }
{ "commits": [ { "message": "XContentMapValues.filter now works with nested arrays\n\nThe filter method of XContentMapValues actually filtered out nested\narrays/lists completely due to a bug in the filter method, which threw\naway all data inside of such an array.\n\nCloses #2944\nThis bug was a follow up problem, because of the filtering of nested arrays\nin case source exclusion was configured." } ], "files": [ { "diff": "@@ -206,6 +206,9 @@ private static void filter(List<Object> from, List<Object> to, String[] includes\n } else if (o instanceof List) {\n List<Object> innerInto = new ArrayList<Object>();\n filter((List<Object>) o, innerInto, includes, excludes, sb);\n+ if (!innerInto.isEmpty()) {\n+ to.add(innerInto);\n+ }\n } else {\n to.add(o);\n }", "filename": "src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java", "status": "modified" }, { "diff": "@@ -26,6 +26,7 @@\n import org.elasticsearch.common.geo.GeoJSONShapeSerializer;\n import org.elasticsearch.common.xcontent.XContentBuilder;\n import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.index.query.QueryBuilders;\n import org.elasticsearch.test.integration.AbstractNodesTests;\n import org.testng.annotations.AfterClass;\n import org.testng.annotations.BeforeClass;\n@@ -36,7 +37,10 @@\n import static org.elasticsearch.index.query.FilterBuilders.*;\n import static org.elasticsearch.index.query.QueryBuilders.*;\n import static org.hamcrest.MatcherAssert.assertThat;\n-import static org.hamcrest.Matchers.equalTo;\n+import static org.hamcrest.Matchers.*;\n+\n+import java.util.List;\n+import java.util.Map;\n \n public class GeoShapeIntegrationTests extends AbstractNodesTests {\n \n@@ -203,4 +207,48 @@ public void testIndexedShapeReference() throws Exception {\n assertThat(searchResponse.getHits().hits().length, equalTo(1));\n assertThat(searchResponse.getHits().getAt(0).id(), equalTo(\"1\"));\n }\n+\n+ @Test // Issue 2944\n+ public void testThatShapeIsReturnedEvenWhenExclusionsAreSet() throws Exception {\n+ client.admin().indices().prepareDelete().execute().actionGet();\n+\n+ String mapping = XContentFactory.jsonBuilder().startObject().startObject(\"type1\")\n+ .startObject(\"properties\").startObject(\"location\")\n+ .field(\"type\", \"geo_shape\")\n+ .endObject().endObject()\n+ .startObject(\"_source\")\n+ .startArray(\"excludes\").value(\"nonExistingField\").endArray()\n+ .endObject()\n+ .endObject().endObject()\n+ .string();\n+ client.admin().indices().prepareCreate(\"test\").addMapping(\"type1\", mapping).execute().actionGet();\n+ client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();\n+\n+ client.prepareIndex(\"test\", \"type1\", \"1\").setSource(jsonBuilder().startObject()\n+ .field(\"name\", \"Document 1\")\n+ .startObject(\"location\")\n+ .field(\"type\", \"envelope\")\n+ .startArray(\"coordinates\").startArray().value(-45.0).value(45).endArray().startArray().value(45).value(-45).endArray().endArray()\n+ .endObject()\n+ .endObject()).execute().actionGet();\n+\n+ client.admin().indices().prepareRefresh(\"test\").execute().actionGet();\n+\n+ SearchResponse searchResponse = client.prepareSearch(\"test\").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet();\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));\n+\n+ Map<String, Object> indexedMap = searchResponse.getHits().getAt(0).sourceAsMap();\n+ assertThat(indexedMap.get(\"location\"), instanceOf(Map.class));\n+ Map<String, Object> 
locationMap = (Map<String, Object>) indexedMap.get(\"location\");\n+ assertThat(locationMap.get(\"coordinates\"), instanceOf(List.class));\n+ List<List<Number>> coordinates = (List<List<Number>>) locationMap.get(\"coordinates\");\n+ assertThat(coordinates.size(), equalTo(2));\n+ assertThat(coordinates.get(0).size(), equalTo(2));\n+ assertThat(coordinates.get(0).get(0).doubleValue(), equalTo(-45.0));\n+ assertThat(coordinates.get(0).get(1).doubleValue(), equalTo(45.0));\n+ assertThat(coordinates.get(1).size(), equalTo(2));\n+ assertThat(coordinates.get(1).get(0).doubleValue(), equalTo(45.0));\n+ assertThat(coordinates.get(1).get(1).doubleValue(), equalTo(-45.0));\n+ assertThat(locationMap.size(), equalTo(2));\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/integration/search/geo/GeoShapeIntegrationTests.java", "status": "modified" }, { "diff": "@@ -20,8 +20,10 @@\n package org.elasticsearch.test.unit.common.xcontent.support;\n \n import org.elasticsearch.common.Strings;\n+import org.elasticsearch.common.collect.Tuple;\n import org.elasticsearch.common.xcontent.XContentBuilder;\n import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.common.xcontent.XContentHelper;\n import org.elasticsearch.common.xcontent.XContentType;\n import org.elasticsearch.common.xcontent.support.XContentMapValues;\n import org.testng.annotations.Test;\n@@ -195,4 +197,18 @@ public void testExtractRawValue() throws Exception {\n map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();\n assertThat(XContentMapValues.extractRawValues(\"path1.xxx.path2.yyy.test\", map).get(0).toString(), equalTo(\"value\"));\n }\n+\n+ @Test\n+ public void testThatFilteringWithNestedArrayAndExclusionWorks() throws Exception {\n+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()\n+ .startArray(\"coordinates\")\n+ .startArray().value(\"foo\").endArray()\n+ .endArray()\n+ .endObject();\n+\n+ Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(builder.bytes(), true);\n+ Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{\"nonExistingField\"});\n+\n+ assertThat(mapTuple.v2(), equalTo(filteredSource));\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/unit/common/xcontent/support/XContentMapValuesTests.java", "status": "modified" } ] }
{ "body": "## Initial problem report:\n\nAs far as I've tested, the \"includes\" and \"excludes\" options for the _source field have no effect - I always get the whole source. Here's a gist for recreating:\nhttps://gist.github.com/radu-gheorghe/5265495\n\nI'm on 64-bit Ubuntu and I've used the .deb packages of 0.19.7 and 0.90.0RC1 with the same results.\n## @clintongormley's comments (thanks!):\n\nHmm - it doesn't work for get-by-id. It works for search though.\nFlushing the index helped, so it looks like its a translog thing\n", "comments": [ { "body": "Same problem here. Also, when I set the `_source` field to `\"enabled\": False` and create a custom stored property called `original` then I can usually retrieve it via GET `?fields=original`, but never via search using `\"fields\": [\"original\"]`. In some cases (which I don't know how yet how to reproduce) the `original` field would completely disappear, even in GET requests.\n", "created_at": "2013-04-08T22:44:36Z" }, { "body": "Also, saying `\"_source\": { \"includes\": [\"original\" ] }` stores all fields (not just `original`), but if I flush the index then all fields (including `original`) are gone.\n", "created_at": "2013-04-08T23:08:14Z" }, { "body": "Hey,\n\nI can confirm, that this is a problem with translog as clint already mentioned. Currently entries from the translog are always returned as is by the `ShardGetService`. So, if the data is read from the translog and not from the indexreader (before a refresh or a flush), the problem occurs.\n\nI have created a first preliminary and incomplete try to fix this, which I dont like very much and hope for input of other developers. See https://github.com/spinscale/elasticsearch/commit/4fccc68ea7db75758e31d752e52a1e1f21a23006\n", "created_at": "2013-04-09T13:29:56Z" }, { "body": "Forgot to close by commit. Closed by a694e97ab97deccb6c533176737ecb055a95e54a\n", "created_at": "2013-04-30T16:09:12Z" } ], "number": 2829, "title": "_source includes/excludes has no effect when getting documents by ID" }
{ "body": "Currently realtime GET does not take source includes/excludes into account.\nThis patch adds support for the source field mapper includes/excludes\nwhen getting an entry from the transaction log. Even though it introduces\na slight performance penalty, it now adheres to the defined configuration\ninstead of returning all source data when a realtime get is done.\n\nCloses #2829\n", "number": 2951, "review_comments": [], "title": "Support source include/exclude for realtime GET" }
{ "commits": [ { "message": "Support source include/exclude for realtime GET\n\nCurrently realtime GET does not take source includes/excludes into account.\nThis patch adds support for the source field mapper includes/excludes\nwhen getting an entry from the transaction log. Even though it introduces\na slight performance penalty, it now adheres to the defined configuration\ninstead of returning all source data when a realtime get is done." } ], "files": [ { "diff": "@@ -23,11 +23,16 @@\n import org.apache.lucene.index.Term;\n import org.elasticsearch.ElasticSearchException;\n import org.elasticsearch.common.bytes.BytesReference;\n+import org.elasticsearch.common.collect.Tuple;\n import org.elasticsearch.common.inject.Inject;\n import org.elasticsearch.common.lucene.uid.UidField;\n import org.elasticsearch.common.metrics.CounterMetric;\n import org.elasticsearch.common.metrics.MeanMetric;\n import org.elasticsearch.common.settings.Settings;\n+import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.common.xcontent.XContentHelper;\n+import org.elasticsearch.common.xcontent.XContentType;\n+import org.elasticsearch.common.xcontent.support.XContentMapValues;\n import org.elasticsearch.index.engine.Engine;\n import org.elasticsearch.index.fielddata.IndexFieldDataService;\n import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;\n@@ -270,7 +275,27 @@ public GetResult innerGet(String type, String id, String[] gFields, boolean real\n sourceRequested = false;\n }\n \n- return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), sourceRequested ? source.source : null, fields);\n+ // Cater for source excludes/includes at the cost of performance\n+ BytesReference sourceToBeReturned = null;\n+ if (sourceRequested) {\n+ sourceToBeReturned = source.source;\n+\n+ SourceFieldMapper sourceFieldMapper = docMapper.sourceMapper();\n+ if (sourceFieldMapper.enabled()) {\n+ boolean filtered = sourceFieldMapper.includes().length > 0 || sourceFieldMapper.excludes().length > 0;\n+ if (filtered) {\n+ Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source.source, true);\n+ Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), sourceFieldMapper.includes(), sourceFieldMapper.excludes());\n+ try {\n+ sourceToBeReturned = XContentFactory.contentBuilder(mapTuple.v1()).map(filteredSource).bytes();\n+ } catch (IOException e) {\n+ throw new ElasticSearchException(\"Failed to get type [\" + type + \"] and id [\" + id + \"] with includes/excludes set\", e);\n+ }\n+ }\n+ }\n+ }\n+\n+ return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), sourceToBeReturned, fields);\n }\n } finally {\n get.release();", "filename": "src/main/java/org/elasticsearch/index/get/ShardGetService.java", "status": "modified" }, { "diff": "@@ -214,6 +214,14 @@ public boolean enabled() {\n return this.enabled;\n }\n \n+ public String[] excludes() {\n+ return this.excludes;\n+\n+ }\n+ public String[] includes() {\n+ return this.includes;\n+ }\n+\n @Override\n public FieldType defaultFieldType() {\n return Defaults.FIELD_TYPE;", "filename": "src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java", "status": "modified" }, { "diff": "@@ -381,4 +381,118 @@ public void testGetDocWithMultivaluedFields() throws Exception {\n assertThat(((List) response.getFields().get(\"field\").getValues().get(0)).get(1).toString(), equalTo(\"2\"));\n }\n \n+ @Test\n+ public void 
testThatGetFromTranslogShouldWorkWithExclude() throws Exception {\n+ client.admin().indices().prepareDelete().execute().actionGet();\n+ String index = \"test\";\n+ String type = \"type1\";\n+\n+ String mapping = jsonBuilder()\n+ .startObject()\n+ .startObject(\"source_excludes\")\n+ .startObject(\"_source\")\n+ .array(\"excludes\", \"excluded\")\n+ .endObject()\n+ .endObject()\n+ .endObject()\n+ .string();\n+\n+ client.admin().indices().prepareCreate(index)\n+ .addMapping(type, mapping)\n+ .setSettings(ImmutableSettings.settingsBuilder().put(\"index.refresh_interval\", -1))\n+ .execute().actionGet();\n+\n+ client.prepareIndex(index, type, \"1\")\n+ .setSource(jsonBuilder().startObject().field(\"field\", \"1\", \"2\").field(\"excluded\", \"should not be seen\").endObject())\n+ .execute().actionGet();\n+\n+ GetResponse responseBeforeFlush = client.prepareGet(index, type, \"1\").execute().actionGet();\n+ client.admin().indices().prepareFlush(index).execute().actionGet();\n+ GetResponse responseAfterFlush = client.prepareGet(index, type, \"1\").execute().actionGet();\n+\n+ assertThat(responseBeforeFlush.isExists(), is(true));\n+ assertThat(responseAfterFlush.isExists(), is(true));\n+ assertThat(responseBeforeFlush.getSourceAsMap(), hasKey(\"field\"));\n+ assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey(\"excluded\")));\n+ assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));\n+ }\n+\n+ @Test\n+ public void testThatGetFromTranslogShouldWorkWithInclude() throws Exception {\n+ client.admin().indices().prepareDelete().execute().actionGet();\n+ String index = \"test\";\n+ String type = \"type1\";\n+\n+ String mapping = jsonBuilder()\n+ .startObject()\n+ .startObject(\"source_excludes\")\n+ .startObject(\"_source\")\n+ .array(\"includes\", \"included\")\n+ .endObject()\n+ .endObject()\n+ .endObject()\n+ .string();\n+\n+ client.admin().indices().prepareCreate(index)\n+ .addMapping(type, mapping)\n+ .setSettings(ImmutableSettings.settingsBuilder().put(\"index.refresh_interval\", -1))\n+ .execute().actionGet();\n+\n+ client.prepareIndex(index, type, \"1\")\n+ .setSource(jsonBuilder().startObject().field(\"field\", \"1\", \"2\").field(\"included\", \"should be seen\").endObject())\n+ .execute().actionGet();\n+\n+ GetResponse responseBeforeFlush = client.prepareGet(index, type, \"1\").execute().actionGet();\n+ client.admin().indices().prepareFlush(index).execute().actionGet();\n+ GetResponse responseAfterFlush = client.prepareGet(index, type, \"1\").execute().actionGet();\n+\n+ assertThat(responseBeforeFlush.isExists(), is(true));\n+ assertThat(responseAfterFlush.isExists(), is(true));\n+ assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey(\"field\")));\n+ assertThat(responseBeforeFlush.getSourceAsMap(), hasKey(\"included\"));\n+ assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));\n+ }\n+\n+ @Test\n+ public void testThatGetFromTranslogShouldWorkWithIncludeExcludeAndFields() throws Exception {\n+ client.admin().indices().prepareDelete().execute().actionGet();\n+ String index = \"test\";\n+ String type = \"type1\";\n+\n+ String mapping = jsonBuilder()\n+ .startObject()\n+ .startObject(\"source_excludes\")\n+ .startObject(\"_source\")\n+ .array(\"includes\", \"included\")\n+ .array(\"exlcudes\", \"excluded\")\n+ .endObject()\n+ .endObject()\n+ .endObject()\n+ .string();\n+\n+ client.admin().indices().prepareCreate(index)\n+ .addMapping(type, mapping)\n+ 
.setSettings(ImmutableSettings.settingsBuilder().put(\"index.refresh_interval\", -1))\n+ .execute().actionGet();\n+\n+ client.prepareIndex(index, type, \"1\")\n+ .setSource(jsonBuilder().startObject()\n+ .field(\"field\", \"1\", \"2\")\n+ .field(\"included\", \"should be seen\")\n+ .field(\"excluded\", \"should not be seen\")\n+ .endObject())\n+ .execute().actionGet();\n+\n+ GetResponse responseBeforeFlush = client.prepareGet(index, type, \"1\").setFields(\"_source\", \"included\", \"excluded\").execute().actionGet();\n+ client.admin().indices().prepareFlush(index).execute().actionGet();\n+ GetResponse responseAfterFlush = client.prepareGet(index, type, \"1\").setFields(\"_source\", \"included\", \"excluded\").execute().actionGet();\n+\n+ assertThat(responseBeforeFlush.isExists(), is(true));\n+ assertThat(responseAfterFlush.isExists(), is(true));\n+ assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey(\"excluded\")));\n+ assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey(\"field\")));\n+ assertThat(responseBeforeFlush.getSourceAsMap(), hasKey(\"included\"));\n+ assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));\n+ }\n+\n }", "filename": "src/test/java/org/elasticsearch/test/integration/get/GetActionTests.java", "status": "modified" } ] }
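For readers skimming the diff above: the heart of the change is the small filtering step added to ShardGetService for the translog path. Below is a minimal standalone sketch of that step; the class and method names are made up for illustration, but the XContent helper calls are the ones the patch itself uses.

```
import java.io.IOException;
import java.util.Map;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;

// Hypothetical helper mirroring the translog-source filtering step the patch adds.
public class TranslogSourceFilter {

    // Converts the stored source to a map, applies the _source mapper's
    // includes/excludes, and re-serializes it with the original content type.
    public static BytesReference filter(BytesReference source, String[] includes, String[] excludes) throws IOException {
        Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source, true);
        Map<String, Object> filtered = XContentMapValues.filter(mapTuple.v2(), includes, excludes);
        return XContentFactory.contentBuilder(mapTuple.v1()).map(filtered).bytes();
    }
}
```

The map conversion plus re-serialization is exactly the extra work the PR description refers to as a slight performance penalty.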
{ "body": "Reported via ML:\nhttps://groups.google.com/forum/?fromgroups=#!topic/elasticsearch/Wsv3ziKdeTk\n", "comments": [ { "body": "we have the same issue.. any updates on this?\n", "created_at": "2013-03-01T09:39:09Z" }, { "body": "Any updates on when this might be fixed? I just need to know whether I should work round the issue or if we should just wait for an official fix?\n", "created_at": "2013-04-02T21:24:19Z" }, { "body": "I might take a while, before we fully support parent / child queries via the delete by query api.\n", "created_at": "2013-04-16T11:04:50Z" }, { "body": "Here is a recreation gist https://gist.github.com/ofavre/5434929 , it fails under 0.90.0.RC2.\nIs it safe to use `{\"term\":{\"_parent\":\"parenttype#parentid\"}}` as a workaround?\n", "created_at": "2013-04-22T13:40:55Z" }, { "body": "@ofavre This is safe to use. I'll make sure you don't get a NPE, when you use parent/child like queries with delete by query, but a descriptive error message for now.\n", "created_at": "2013-04-22T15:42:10Z" }, { "body": "Implemented via #3822\n", "created_at": "2013-10-09T08:48:15Z" } ], "number": 2705, "title": "Parent/child queries don't work with via the delete by query api" }
{ "body": "Fixes #2705\n", "number": 2923, "review_comments": [], "title": "Provide a SearchContext for DeleteByQuery" }
{ "commits": [ { "message": "Provide a SearchContext for DeleteByQuery\n\nFixes #2705" } ], "files": [ { "diff": "@@ -31,8 +31,13 @@\n import org.elasticsearch.common.inject.Inject;\n import org.elasticsearch.common.settings.Settings;\n import org.elasticsearch.index.engine.Engine;\n+import org.elasticsearch.index.service.IndexService;\n import org.elasticsearch.index.shard.service.IndexShard;\n import org.elasticsearch.indices.IndicesService;\n+import org.elasticsearch.script.ScriptService;\n+import org.elasticsearch.search.SearchShardTarget;\n+import org.elasticsearch.search.internal.SearchContext;\n+import org.elasticsearch.search.internal.ShardSearchRequest;\n import org.elasticsearch.threadpool.ThreadPool;\n import org.elasticsearch.transport.TransportService;\n \n@@ -41,11 +46,14 @@\n */\n public class TransportShardDeleteByQueryAction extends TransportShardReplicationOperationAction<ShardDeleteByQueryRequest, ShardDeleteByQueryRequest, ShardDeleteByQueryResponse> {\n \n+ private final ScriptService scriptService;\n+\n @Inject\n public TransportShardDeleteByQueryAction(Settings settings, TransportService transportService,\n ClusterService clusterService, IndicesService indicesService, ThreadPool threadPool,\n- ShardStateAction shardStateAction) {\n+ ShardStateAction shardStateAction, ScriptService scriptService) {\n super(settings, transportService, clusterService, indicesService, threadPool, shardStateAction);\n+ this.scriptService = scriptService;\n }\n \n @Override\n@@ -92,18 +100,48 @@ protected ClusterBlockException checkRequestBlock(ClusterState state, ShardDelet\n protected PrimaryResponse<ShardDeleteByQueryResponse, ShardDeleteByQueryRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) {\n ShardDeleteByQueryRequest request = shardRequest.request;\n IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);\n- Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.querySource(), request.filteringAliases(), request.types());\n- indexShard.deleteByQuery(deleteByQuery);\n- return new PrimaryResponse<ShardDeleteByQueryResponse, ShardDeleteByQueryRequest>(shardRequest.request, new ShardDeleteByQueryResponse(), null);\n+\n+ SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), shardRequest.shardId);\n+ IndexService indexService = indicesService.indexServiceSafe(request.index());\n+ SearchContext context = new SearchContext(0,\n+ new ShardSearchRequest().types(request.types()).filteringAliases(request.filteringAliases()),\n+ shardTarget, indexShard.searcher(), indexService, indexShard,\n+ scriptService);\n+ SearchContext.setCurrent(context);\n+\n+ try {\n+ Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.querySource(), request.filteringAliases(), request.types());\n+ indexShard.deleteByQuery(deleteByQuery);\n+ return new PrimaryResponse<ShardDeleteByQueryResponse, ShardDeleteByQueryRequest>(shardRequest.request, new ShardDeleteByQueryResponse(), null);\n+ } finally {\n+ // this will also release the index searcher\n+ context.release();\n+ SearchContext.removeCurrent();\n+ }\n }\n \n \n @Override\n protected void shardOperationOnReplica(ReplicaOperationRequest shardRequest) {\n ShardDeleteByQueryRequest request = shardRequest.request;\n IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);\n- Engine.DeleteByQuery 
deleteByQuery = indexShard.prepareDeleteByQuery(request.querySource(), request.filteringAliases(), request.types());\n- indexShard.deleteByQuery(deleteByQuery);\n+\n+ SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), shardRequest.shardId);\n+ IndexService indexService = indicesService.indexServiceSafe(request.index());\n+ SearchContext context = new SearchContext(0,\n+ new ShardSearchRequest().types(request.types()).filteringAliases(request.filteringAliases()),\n+ shardTarget, indexShard.searcher(), indexService, indexShard,\n+ scriptService);\n+ SearchContext.setCurrent(context);\n+\n+ try {\n+ Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.querySource(), request.filteringAliases(), request.types());\n+ indexShard.deleteByQuery(deleteByQuery);\n+ } finally {\n+ // this will also release the index searcher\n+ context.release();\n+ SearchContext.removeCurrent();\n+ }\n }\n \n @Override", "filename": "src/main/java/org/elasticsearch/action/deletebyquery/TransportShardDeleteByQueryAction.java", "status": "modified" }, { "diff": "@@ -0,0 +1,50 @@\n+package org.elasticsearch.test.unit.index.query;\n+\n+import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse;\n+import org.elasticsearch.action.deletebyquery.IndexDeleteByQueryResponse;\n+import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.index.query.HasParentQueryBuilder;\n+import org.elasticsearch.index.query.MatchAllQueryBuilder;\n+import org.elasticsearch.test.integration.AbstractNodesTests;\n+import org.testng.annotations.Test;\n+\n+import java.util.Map;\n+\n+import static org.elasticsearch.client.Requests.createIndexRequest;\n+import static org.elasticsearch.client.Requests.deleteByQueryRequest;\n+import static org.elasticsearch.client.Requests.putMappingRequest;\n+import static org.hamcrest.CoreMatchers.equalTo;\n+import static org.hamcrest.CoreMatchers.notNullValue;\n+import static org.hamcrest.MatcherAssert.assertThat;\n+\n+@Test\n+public class HasParentTests extends AbstractNodesTests {\n+\n+ @Test\n+ public void testHasParent() throws Exception {\n+ startNode(\"node1\");\n+ client(\"node1\").admin().indices().create(createIndexRequest(\"index\")).actionGet();\n+ client(\"node1\").admin().indices().putMapping(putMappingRequest(\"index\").type(\"parent\").source(XContentFactory.jsonBuilder()\n+ .startObject()\n+ .startObject(\"parent\")\n+ .endObject()\n+ .endObject()\n+ )).actionGet();\n+ client(\"node1\").admin().indices().putMapping(putMappingRequest(\"index\").type(\"child\").source(XContentFactory.jsonBuilder()\n+ .startObject()\n+ .startObject(\"child\")\n+ .startObject(\"_parent\")\n+ .field(\"type\", \"parent\")\n+ .endObject()\n+ .endObject()\n+ .endObject()\n+ )).actionGet();\n+ DeleteByQueryResponse response = client(\"node1\").deleteByQuery(deleteByQueryRequest(\"index\").query(new HasParentQueryBuilder(\"parent\", new MatchAllQueryBuilder()))).actionGet();\n+ assertThat(response, notNullValue());\n+ for (Map.Entry<String,IndexDeleteByQueryResponse> entry : response.getIndices().entrySet()) {\n+ IndexDeleteByQueryResponse subresp = entry.getValue();\n+ assertThat(subresp.getFailedShards(), equalTo(0));\n+ }\n+ }\n+\n+}", "filename": "src/test/java/org/elasticsearch/test/unit/index/query/HasParentTests.java", "status": "added" } ] }
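A usage sketch based on the HasParentTests added above, showing what the fix enables: running a parent/child query through the delete-by-query API without the NPE reported in the issue. The example class name and the assumption of an already-connected Client are mine.

```
import static org.elasticsearch.client.Requests.deleteByQueryRequest;

import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.HasParentQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;

// Hypothetical example class; assumes a connected Client against a node with the fix applied.
public class DeleteByParentExample {

    // Deletes all child documents whose parent matches, via delete-by-query.
    // The fix binds a SearchContext on the shard, so has_parent/has_child no longer NPE here.
    public static DeleteByQueryResponse deleteChildrenOf(Client client, String index, String parentType) {
        return client.deleteByQuery(
                deleteByQueryRequest(index)
                        .query(new HasParentQueryBuilder(parentType, new MatchAllQueryBuilder()))
        ).actionGet();
    }
}
```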
{ "body": "When using `minimum_should_match` with a `multi_match` query, it is being applied to the ``bool` query which wraps the per-field queries. It should be applied to each per-field query instead:\n\n```\ncurl -XPOST 'http://127.0.0.1:9200/test/test?pretty=1' -d '\n{\n \"foo\" : \"one two three\"\n}\n'\n```\n\nWith a `match` query, the minimum of 70% doesn't find any results (correctly):\n\n```\ncurl -XGET 'http://127.0.0.1:9200/test/test/_search?pretty=1' -d '\n{\n \"query\" : {\n \"match\" : {\n \"foo\" : {\n \"minimum_should_match\" : \"70%\",\n \"query\" : \"three four five\"\n }\n }\n }\n}\n'\n\n# {\n# \"hits\" : {\n# \"hits\" : [],\n# \"max_score\" : null,\n# \"total\" : 0\n# },\n# \"timed_out\" : false,\n# \"_shards\" : {\n# \"failed\" : 0,\n# \"successful\" : 5,\n# \"total\" : 5\n# },\n# \"took\" : 10\n# }\n```\n\nWith `multi_match`, it finds results (incorrectly):\n\n```\ncurl -XGET 'http://127.0.0.1:9200/test/test/_search?pretty=1' -d '\n{\n \"query\" : {\n \"multi_match\" : {\n \"minimum_should_match\" : \"70%\",\n \"fields\" : [\n \"foo\",\n \"bar\"\n ],\n \"query\" : \"three four five\",\n \"use_dis_max\": true\n }\n }\n}\n'\n\n# {\n# \"hits\" : {\n# \"hits\" : [\n# {\n# \"_source\" : {\n# \"foo\" : \"one two three\"\n# },\n# \"_score\" : 0.009060421,\n# \"_index\" : \"test\",\n# \"_id\" : \"sa8shEUoR5SRtME0EA4Gyw\",\n# \"_type\" : \"test\"\n# }\n# ],\n# \"max_score\" : 0.009060421,\n# \"total\" : 1\n# },\n# \"timed_out\" : false,\n# \"_shards\" : {\n# \"failed\" : 0,\n# \"successful\" : 5,\n# \"total\" : 5\n# },\n# \"took\" : 5\n# } \n```\n", "comments": [ { "body": "awesome! i will look at this soon I hope!\n", "created_at": "2013-04-19T17:07:36Z" } ], "number": 2918, "title": "minimum_should_match applied to wrong query in multi_match" }
{ "body": "When specifying minimum_should_match in a multi_match query it was being applied\nto the outer bool query instead of to each of the inner field-specific bool queries.\n\nCloses #2918\n", "number": 2919, "review_comments": [], "title": "Apply minimum_should_match to inner clauses of multi_match query" }
{ "commits": [ { "message": "Apply minimum_should_match to inner clauses of multi_match query\nWhen specifying minimum_should_match in a multi_match query it was being applied\nto the outer bool query instead of to each of the inner field-specific bool queries.\n\nCloses #2918" }, { "message": "Added tests for multi_match with minimum_should_match" } ], "files": [ { "diff": "@@ -21,10 +21,8 @@\n \n import com.google.common.collect.Maps;\n import org.apache.lucene.search.BooleanClause;\n-import org.apache.lucene.search.BooleanQuery;\n import org.apache.lucene.search.Query;\n import org.elasticsearch.common.inject.Inject;\n-import org.elasticsearch.common.lucene.search.Queries;\n import org.elasticsearch.common.regex.Regex;\n import org.elasticsearch.common.xcontent.XContentParser;\n import org.elasticsearch.index.query.support.QueryParsers;\n@@ -172,15 +170,11 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n throw new QueryParsingException(parseContext.index(), \"No fields specified for match_all query\");\n }\n \n- Query query = multiMatchQuery.parse(type, fieldNameWithBoosts, value);\n+ Query query = multiMatchQuery.parse(type, fieldNameWithBoosts, value,minimumShouldMatch);\n if (query == null) {\n return null;\n }\n \n- if (query instanceof BooleanQuery) {\n- Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);\n- }\n-\n query.setBoost(boost);\n return query;\n }", "filename": "src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java", "status": "modified" }, { "diff": "@@ -23,6 +23,7 @@\n import org.apache.lucene.search.BooleanQuery;\n import org.apache.lucene.search.DisjunctionMaxQuery;\n import org.apache.lucene.search.Query;\n+import org.elasticsearch.common.lucene.search.Queries;\n import org.elasticsearch.index.query.QueryParseContext;\n \n import java.io.IOException;\n@@ -45,17 +46,21 @@ public MultiMatchQuery(QueryParseContext parseContext) {\n super(parseContext);\n }\n \n- public Query parse(Type type, Map<String, Float> fieldNames, Object value) throws IOException {\n+ public Query parse(Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException {\n if (fieldNames.size() == 1) {\n Map.Entry<String, Float> fieldBoost = fieldNames.entrySet().iterator().next();\n Float boostValue = fieldBoost.getValue();\n+ Query query;\n if (boostValue == null) {\n- return parse(type, fieldBoost.getKey(), value);\n+ query = parse(type, fieldBoost.getKey(), value);\n } else {\n- Query query = parse(type, fieldBoost.getKey(), value);\n+ query = parse(type, fieldBoost.getKey(), value);\n query.setBoost(boostValue);\n- return query;\n }\n+ if (query instanceof BooleanQuery) {\n+ Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);\n+ }\n+ return query;\n }\n \n if (useDisMax) {\n@@ -67,6 +72,9 @@ public Query parse(Type type, Map<String, Float> fieldNames, Object value) throw\n if (boostValue != null) {\n query.setBoost(boostValue);\n }\n+ if (query instanceof BooleanQuery) {\n+ Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);\n+ }\n if (query != null) {\n clauseAdded = true;\n disMaxQuery.add(query);\n@@ -81,6 +89,9 @@ public Query parse(Type type, Map<String, Float> fieldNames, Object value) throw\n if (boostValue != null) {\n query.setBoost(boostValue);\n }\n+ if (query instanceof BooleanQuery) {\n+ Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);\n+ }\n if (query != null) {\n booleanQuery.add(query, 
BooleanClause.Occur.SHOULD);\n }", "filename": "src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java", "status": "modified" }, { "diff": "@@ -658,6 +658,60 @@ public void testMultiMatchQueryZeroTermsQuery() {\n assertThat(searchResponse.getHits().totalHits(), equalTo(2l));\n }\n \n+ @Test\n+ public void testMultiMatchQueryMinShouldMatch() {\n+ try {\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+\n+ client.admin().indices().prepareCreate(\"test\").setSettings(ImmutableSettings.settingsBuilder().put(\"index.number_of_shards\", 1)).execute().actionGet();\n+ client.prepareIndex(\"test\", \"type1\", \"1\").setSource(\"field1\", new String[]{\"value1\",\"value2\",\"value3\"}).execute().actionGet();\n+ client.prepareIndex(\"test\", \"type1\", \"2\").setSource(\"field2\", \"value1\").execute().actionGet();\n+ client.admin().indices().prepareRefresh(\"test\").execute().actionGet();\n+\n+ \tMultiMatchQueryBuilder multiMatchQuery = multiMatchQuery(\"value1 value2 foo\", \"field1\",\"field2\");\n+\n+ \tmultiMatchQuery.useDisMax(true);\n+ \tmultiMatchQuery.minimumShouldMatch(\"70%\");\n+ SearchResponse searchResponse = client.prepareSearch()\n+ .setQuery(multiMatchQuery)\n+ .execute().actionGet();\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(1l));\n+\n+ \tmultiMatchQuery.minimumShouldMatch(\"30%\");\n+ searchResponse = client.prepareSearch()\n+ .setQuery(multiMatchQuery)\n+ .execute().actionGet();\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(2l));\n+\n+ multiMatchQuery.useDisMax(false);\n+ \tmultiMatchQuery.minimumShouldMatch(\"70%\");\n+ searchResponse = client.prepareSearch()\n+ .setQuery(multiMatchQuery)\n+ .execute().actionGet();\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(1l));\n+\n+ \tmultiMatchQuery.minimumShouldMatch(\"30%\");\n+ searchResponse = client.prepareSearch()\n+ .setQuery(multiMatchQuery)\n+ .execute().actionGet();\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(2l));\n+\n+ multiMatchQuery = multiMatchQuery(\"value1 value2 bar\", \"field1\");\n+ \tmultiMatchQuery.minimumShouldMatch(\"100%\");\n+ searchResponse = client.prepareSearch()\n+ .setQuery(multiMatchQuery)\n+ .execute().actionGet();\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(0l));\n+\n+ \tmultiMatchQuery.minimumShouldMatch(\"70%\");\n+ searchResponse = client.prepareSearch()\n+ .setQuery(multiMatchQuery)\n+ .execute().actionGet();\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(1l));\n+}\n @Test\n public void testFuzzyQueryString() {\n client.admin().indices().prepareDelete().execute().actionGet();", "filename": "src/test/java/org/elasticsearch/test/integration/search/query/SimpleQueryTests.java", "status": "modified" } ] }
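A usage sketch distilled from the SimpleQueryTests additions above. With the fix, minimum_should_match is applied inside each per-field query, so a 70% requirement over three terms really does require roughly two of them to match within a single field. The class name and the assumption of an existing Client are mine.

```
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;

// Hypothetical example class; assumes a connected Client.
public class MultiMatchMinShouldMatchExample {

    public static SearchResponse search(Client client) {
        MultiMatchQueryBuilder query = multiMatchQuery("value1 value2 foo", "field1", "field2");
        query.useDisMax(true);
        // Applied per field after the fix, not to the wrapping bool query.
        query.minimumShouldMatch("70%");
        return client.prepareSearch().setQuery(query).execute().actionGet();
    }
}
```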
{ "body": "ignore_indices=missing doesn't fail if all the indices requested are not present, it defaults to searching everything which is far from desirable, specially when doing a heavy facet (e.g. termstats) in a daily index case (e.g. logs_2013-04-01) and there is no data indexed for the day\n\nTo reproduce start a node and index any document to index \"A\" then curl this:\n\ncurl 'http://localhost:9200/B,C/_search?ignore_indices=missing&pretty'\n\nTried on 0.20.6 this returns the document just indexed to index \"A\".\n\nIf that is the expected behavior for the 'missing' value then I suggest adding another value to ignore_indices, for example \"ignore_indices=missing_some\" that will fail if none of the queried indices are there\n", "comments": [ { "body": "This isn't expected behaviour. I think the best thing if none of the specified indices exists, is to return an error.\n", "created_at": "2013-04-02T12:19:29Z" }, { "body": "@mahdeto Thanks for reporting this issue! The fix (based on your fix) will be included in the next release.\n", "created_at": "2013-04-02T17:08:07Z" }, { "body": "Sweet :) glad I could help :) this was my first contrib to ES and hopefully\nwon't be the last.\n\nOn Tue, Apr 2, 2013 at 7:08 PM, Martijn van Groningen <\nnotifications@github.com> wrote:\n\n> @mahdeto https://github.com/mahdeto Thanks for reporting this issue!\n> The fix (based on your fix) will be included in the next release.\n> \n> —\n> Reply to this email directly or view it on GitHubhttps://github.com/elasticsearch/elasticsearch/issues/2837#issuecomment-15788731\n> .\n", "created_at": "2013-04-02T17:52:22Z" } ], "number": 2837, "title": "ignore_indices doesn't fail if all the indices requested are not present" }
{ "body": "setting ignore_indices=missing will fail your query if all the indices are missing as opposed to making your query run against everything.. fixes #2837\n", "number": 2844, "review_comments": [], "title": "Fix for #2837 by checking if all the indices were missing and failing that." }
{ "commits": [ { "message": "setting ignore_indices=missing will fail your query if all the indices are missing as opposed to making your query run against everything.. fixes #2837" } ], "files": [ { "diff": "@@ -505,6 +505,10 @@ public String[] concreteIndices(String[] aliasesOrIndices, IgnoreIndices ignoreI\n String[] actualLst = aliasAndIndexToIndexMap.get(aliasOrIndex);\n if (actualLst == null) {\n if (ignoreIndices == IgnoreIndices.MISSING) {\n+ \t//if nothing was found but something requested break for this will cause you to search everything instead\n+ if(aliasesOrIndices != null && aliasesOrIndices.length > 0) {\n+ \tthrow new IndexMissingException(new Index(Arrays.toString(aliasesOrIndices)));\n+ }\n return Strings.EMPTY_ARRAY;\n }\n throw new IndexMissingException(new Index(aliasOrIndex));\n@@ -539,6 +543,12 @@ public String[] concreteIndices(String[] aliasesOrIndices, IgnoreIndices ignoreI\n }\n }\n }\n+ \n+ //if nothing was found but something requested break for this will cause you to search everything instead\n+ if(aliasesOrIndices != null && aliasesOrIndices.length > 0 && actualIndices.isEmpty()) {\n+ \tthrow new IndexMissingException(new Index(Arrays.toString(aliasesOrIndices)));\n+ }\n+ \n return actualIndices.toArray(new String[actualIndices.size()]);\n }\n ", "filename": "src/main/java/org/elasticsearch/cluster/metadata/MetaData.java", "status": "modified" }, { "diff": "@@ -52,6 +52,28 @@ public void closeNodes() {\n closeAllNodes();\n }\n \n+ @Test\n+ public void testAllMissing() throws Exception {\n+ \tclient.admin().indices().prepareDelete().execute().actionGet();\n+ \tclient.admin().indices().prepareCreate(\"test1\").execute().actionGet();\n+ ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().setWaitForYellowStatus().execute().actionGet();\n+ assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));\n+ try {\n+ client.prepareSearch(\"test2\").setQuery(QueryBuilders.matchAllQuery()).setIgnoreIndices(IgnoreIndices.MISSING).execute().actionGet();\n+ fail(\"Exception should have been thrown.\");\n+ } catch (IndexMissingException e) {\n+ }\n+ \n+ try {\n+ client.prepareSearch(\"test2\",\"test3\").setQuery(QueryBuilders.matchAllQuery()).setIgnoreIndices(IgnoreIndices.MISSING).execute().actionGet();\n+ fail(\"Exception should have been thrown.\");\n+ } catch (IndexMissingException e) {\n+ }\n+ \n+ //you should still be able to run empty searches without things blowing up\n+ client.prepareSearch().setQuery(QueryBuilders.matchAllQuery()).setIgnoreIndices(IgnoreIndices.MISSING).execute().actionGet();\n+ }\n+ \n @Test\n public void testMissing() throws Exception {\n client.admin().indices().prepareDelete().execute().actionGet();", "filename": "src/test/java/org/elasticsearch/test/integration/indices/IgnoreIndicesTests.java", "status": "modified" }, { "diff": "@@ -24,8 +24,11 @@\n import org.elasticsearch.cluster.metadata.IndexMetaData;\n import org.elasticsearch.cluster.metadata.MetaData;\n import org.elasticsearch.common.settings.ImmutableSettings;\n+import org.elasticsearch.indices.IndexMissingException;\n import org.testng.annotations.Test;\n \n+import com.google.common.collect.Sets;\n+\n import static com.google.common.collect.Sets.newHashSet;\n import static org.hamcrest.MatcherAssert.assertThat;\n import static org.hamcrest.Matchers.equalTo;\n@@ -66,4 +69,40 @@ public void convertWildcardsTests() {\n private IndexMetaData.Builder indexBuilder(String index) {\n return 
IndexMetaData.builder(index).settings(ImmutableSettings.settingsBuilder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));\n }\n+ \n+ @Test(expectedExceptions = IndexMissingException.class)\n+ public void concreteIndicesIgnoreIndicesOneMissingIndex() {\n+ \t MetaData.Builder mdBuilder = MetaData.builder()\n+ .put(indexBuilder(\"testXXX\"))\n+ .put(indexBuilder(\"kuku\"));\n+ MetaData md = mdBuilder.build();\n+ md.concreteIndices(new String[]{\"testZZZ\"}, IgnoreIndices.MISSING, true);\n+ }\n+ \n+ @Test\n+ public void concreteIndicesIgnoreIndicesOneMissingIndexOtherFound() {\n+ \t MetaData.Builder mdBuilder = MetaData.builder()\n+ .put(indexBuilder(\"testXXX\"))\n+ .put(indexBuilder(\"kuku\"));\n+ MetaData md = mdBuilder.build();\n+ assertThat(newHashSet(md.concreteIndices(new String[]{\"testXXX\",\"testZZZ\"}, IgnoreIndices.MISSING, true)), equalTo(newHashSet(\"testXXX\")));\n+ }\n+\n+ @Test(expectedExceptions = IndexMissingException.class)\n+ public void concreteIndicesIgnoreIndicesAllMissing() {\n+ \t MetaData.Builder mdBuilder = MetaData.builder()\n+ .put(indexBuilder(\"testXXX\"))\n+ .put(indexBuilder(\"kuku\"));\n+ MetaData md = mdBuilder.build();\n+ assertThat(newHashSet(md.concreteIndices(new String[]{\"testMo\",\"testMahdy\"}, IgnoreIndices.MISSING, true)), equalTo(newHashSet(\"testXXX\")));\n+ }\n+ \n+ @Test\n+ public void concreteIndicesIgnoreIndicesEmptyRequest() {\n+ \t MetaData.Builder mdBuilder = MetaData.builder()\n+ .put(indexBuilder(\"testXXX\"))\n+ .put(indexBuilder(\"kuku\"));\n+ MetaData md = mdBuilder.build();\n+ assertThat(newHashSet(md.concreteIndices(new String[]{}, IgnoreIndices.MISSING, true)), equalTo(Sets.<String>newHashSet(\"kuku\",\"testXXX\")));\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/unit/cluster/metadata/MetaDataTests.java", "status": "modified" } ] }
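A usage sketch based on the IgnoreIndicesTests additions above: after the fix, a search that names only non-existent indices throws IndexMissingException rather than silently querying everything. The example class name, and the assumption that the IgnoreIndices enum lives in org.elasticsearch.action.support, are mine.

```
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IgnoreIndices;
import org.elasticsearch.client.Client;
import org.elasticsearch.indices.IndexMissingException;

// Hypothetical example class; assumes a connected Client and that "test2"/"test3" do not exist.
public class IgnoreIndicesExample {

    public static SearchResponse searchOnlyMissingIndices(Client client) {
        try {
            return client.prepareSearch("test2", "test3")
                    .setQuery(matchAllQuery())
                    .setIgnoreIndices(IgnoreIndices.MISSING)
                    .execute().actionGet();
        } catch (IndexMissingException e) {
            // Expected after the fix: none of the requested indices exist.
            return null;
        }
    }
}
```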
{ "body": "It appears that the new SimpleRateLimiter that was added to Lucene 4.x returns the targetNS (meaning the time when the method should exit) instead of the duration that was actually paused. This then leads to StoreStats overflowing the throttleTimeInNanos which then causes serialization issues with writeVLong (arithmetic exceptions, etc)\n", "comments": [ { "body": "good catch!! I will look into this asap!\n", "created_at": "2013-03-15T21:08:51Z" }, { "body": "here is the lucene issue including a patch to push this fix upstream: https://issues.apache.org/jira/browse/LUCENE-4836\n", "created_at": "2013-03-15T22:32:20Z" }, { "body": "thank you very much for figuring this out and reporting this! Very much appreciated!\n", "created_at": "2013-03-15T22:39:11Z" }, { "body": "indeed, and I must add to that, very impressive in nailing down why it happens.\n", "created_at": "2013-03-15T23:18:31Z" }, { "body": "And thank you for getting this fixed so quickly.\n", "created_at": "2013-03-17T04:44:06Z" } ], "number": 2785, "title": "StoreStats's throttleTimeInNanos overflows causing serialization issues" }
{ "body": "Closes #2785\n", "number": 2787, "review_comments": [], "title": "Fix bug in RateLimiter.SimpleRateLimiter causing numeric overflow in StoreStats" }
{ "commits": [ { "message": "Fix bug in RateLimiter.SimpleRateLimiter causing numeric overflow in StoreStats\n\nCloses #2785" } ], "files": [ { "diff": "@@ -53,8 +53,8 @@ public static Type fromString(String type) throws ElasticSearchIllegalArgumentEx\n }\n }\n \n- private final RateLimiter.SimpleRateLimiter rateLimiter = new RateLimiter.SimpleRateLimiter(0);\n- private volatile RateLimiter.SimpleRateLimiter actualRateLimiter;\n+ private final XSimpleRateLimiter rateLimiter = new XSimpleRateLimiter(0);\n+ private volatile XSimpleRateLimiter actualRateLimiter;\n \n private volatile Type type;\n ", "filename": "src/main/java/org/apache/lucene/store/StoreRateLimiting.java", "status": "modified" }, { "diff": "@@ -0,0 +1,98 @@\n+/*\n+ * Licensed to ElasticSearch and Shay Banon under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+package org.apache.lucene.store;\n+\n+import org.apache.lucene.util.ThreadInterruptedException;\n+\n+// LUCENE UPGRADE - this is a copy of a RateLimiter.SimpleRateLimiter fixing bug #2785 Lucene 4.3 should fix that\n+public final class XSimpleRateLimiter extends RateLimiter {\n+ private volatile double mbPerSec;\n+ private volatile double nsPerByte;\n+ private volatile long lastNS;\n+\n+ // TODO: we could also allow eg a sub class to dynamically\n+ // determine the allowed rate, eg if an app wants to\n+ // change the allowed rate over time or something\n+\n+ /** mbPerSec is the MB/sec max IO rate */\n+ public XSimpleRateLimiter(double mbPerSec) {\n+ setMbPerSec(mbPerSec);\n+ }\n+\n+ /**\n+ * Sets an updated mb per second rate limit.\n+ */\n+ @Override\n+ public void setMbPerSec(double mbPerSec) {\n+ this.mbPerSec = mbPerSec;\n+ nsPerByte = 1000000000. / (1024*1024*mbPerSec);\n+ \n+ }\n+\n+ /**\n+ * The current mb per second rate limit.\n+ */\n+ @Override\n+ public double getMbPerSec() {\n+ return this.mbPerSec;\n+ }\n+ \n+ /** Pauses, if necessary, to keep the instantaneous IO\n+ * rate at or below the target. NOTE: multiple threads\n+ * may safely use this, however the implementation is\n+ * not perfectly thread safe but likely in practice this\n+ * is harmless (just means in some rare cases the rate\n+ * might exceed the target). 
It's best to call this\n+ * with a biggish count, not one byte at a time.\n+ * @return the pause time in nano seconds \n+ * */\n+ @Override\n+ public long pause(long bytes) {\n+ if (bytes == 1) {\n+ return 0;\n+ }\n+\n+ // TODO: this is purely instantaneous rate; maybe we\n+ // should also offer decayed recent history one?\n+ final long targetNS = lastNS = lastNS + ((long) (bytes * nsPerByte));\n+ final long startNs;\n+ long curNS = startNs = System.nanoTime();\n+ if (lastNS < curNS) {\n+ lastNS = curNS;\n+ }\n+\n+ // While loop because Thread.sleep doesn't always sleep\n+ // enough:\n+ while(true) {\n+ final long pauseNS = targetNS - curNS;\n+ if (pauseNS > 0) {\n+ try {\n+ Thread.sleep((int) (pauseNS/1000000), (int) (pauseNS % 1000000));\n+ } catch (InterruptedException ie) {\n+ throw new ThreadInterruptedException(ie);\n+ }\n+ curNS = System.nanoTime();\n+ continue;\n+ }\n+ break;\n+ }\n+ \n+ return curNS - startNs;\n+ }\n+ }\n\\ No newline at end of file", "filename": "src/main/java/org/apache/lucene/store/XSimpleRateLimiter.java", "status": "added" }, { "diff": "@@ -21,6 +21,7 @@\n \n import com.google.common.base.Objects;\n import org.apache.lucene.store.RateLimiter;\n+import org.apache.lucene.store.XSimpleRateLimiter;\n import org.elasticsearch.common.component.AbstractComponent;\n import org.elasticsearch.common.inject.Inject;\n import org.elasticsearch.common.settings.Settings;\n@@ -53,7 +54,7 @@ public class RecoverySettings extends AbstractComponent {\n private final ThreadPoolExecutor concurrentStreamPool;\n \n private volatile ByteSizeValue maxSizePerSec;\n- private volatile RateLimiter.SimpleRateLimiter rateLimiter;\n+ private volatile XSimpleRateLimiter rateLimiter;\n \n @Inject\n public RecoverySettings(Settings settings, NodeSettingsService nodeSettingsService) {\n@@ -71,7 +72,7 @@ public RecoverySettings(Settings settings, NodeSettingsService nodeSettingsServi\n if (maxSizePerSec.bytes() <= 0) {\n rateLimiter = null;\n } else {\n- rateLimiter = new RateLimiter.SimpleRateLimiter(maxSizePerSec.mbFrac());\n+ rateLimiter = new XSimpleRateLimiter(maxSizePerSec.mbFrac());\n }\n \n logger.debug(\"using max_size_per_sec[{}], concurrent_streams [{}], file_chunk_size [{}], translog_size [{}], translog_ops [{}], and compress [{}]\",\n@@ -130,7 +131,7 @@ public void onRefreshSettings(Settings settings) {\n } else if (rateLimiter != null) {\n rateLimiter.setMbPerSec(maxSizePerSec.mbFrac());\n } else {\n- rateLimiter = new RateLimiter.SimpleRateLimiter(maxSizePerSec.mbFrac());\n+ rateLimiter = new XSimpleRateLimiter(maxSizePerSec.mbFrac());\n }\n }\n ", "filename": "src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java", "status": "modified" }, { "diff": "@@ -0,0 +1,62 @@\n+/*\n+ * Licensed to ElasticSearch and Shay Banon under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. 
See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+package org.apache.lucene.store;\n+\n+import java.util.concurrent.TimeUnit;\n+\n+import org.testng.annotations.Test;\n+import org.apache.lucene.store.RateLimiter.SimpleRateLimiter;\n+import org.apache.lucene.util.Version;\n+import org.elasticsearch.common.lucene.Lucene;\n+\n+import static org.hamcrest.MatcherAssert.assertThat;\n+import static org.hamcrest.Matchers.lessThan;\n+import static org.hamcrest.Matchers.greaterThan;\n+\n+public class XSimpleRateLimiterTest {\n+\n+ @Test\n+ public void testPause() {\n+ XSimpleRateLimiter limiter = new XSimpleRateLimiter(10); // 10 MB / Sec\n+ limiter.pause(2);//init\n+ long pause = 0;\n+ for (int i = 0; i < 3; i++) {\n+ pause += limiter.pause(4 * 1024 * 1024); // fire up 3 * 4 MB \n+ }\n+ final long convert = TimeUnit.MILLISECONDS.convert(pause, TimeUnit.NANOSECONDS);\n+ assertThat(convert, lessThan(2000l)); // more than 2 seconds should be an error here!\n+ assertThat(convert, greaterThan(1000l)); // we should sleep at lease 1 sec\n+ }\n+ \n+ @Test\n+ public void testPauseLucene() {\n+ if (Version.LUCENE_42 != Lucene.VERSION) { // once we upgrade test the lucene impl again\n+ SimpleRateLimiter limiter = new SimpleRateLimiter(10); // 10 MB / Sec\n+ limiter.pause(2);//init\n+ long pause = 0;\n+ for (int i = 0; i < 3; i++) {\n+ pause += limiter.pause(4 * 1024 * 1024); // fire up 3 * 4 MB \n+ }\n+ final long convert = TimeUnit.MILLISECONDS.convert(pause, TimeUnit.NANOSECONDS);\n+ assertThat(convert, lessThan(2000l)); // more than 2 seconds should be an error here!\n+ assertThat(convert, greaterThan(1000l)); // we should sleep at lease 1 sec\n+ assert false : \"Upgrade XSimpleRateLimiter to Lucene SimpleRateLimiter\";\n+ }\n+ }\n+}", "filename": "src/test/java/org/apache/lucene/store/XSimpleRateLimiterTest.java", "status": "added" } ] }
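A small sketch, modeled on the XSimpleRateLimiterTest above, of why the return value matters: pause() now reports the time actually slept, so a caller that sums those values (as the store-level throttle statistics do) gets a bounded number instead of an ever-growing target timestamp.

```
import java.util.concurrent.TimeUnit;

import org.apache.lucene.store.XSimpleRateLimiter;

// Hypothetical example class showing the corrected pause() contract.
public class RateLimiterPauseExample {

    public static long throttledMillis() {
        XSimpleRateLimiter limiter = new XSimpleRateLimiter(10); // 10 MB / sec
        long pausedNanos = 0;
        for (int i = 0; i < 3; i++) {
            // 3 * 4 MB at 10 MB/sec should pause a bit over one second in total.
            pausedNanos += limiter.pause(4 * 1024 * 1024);
        }
        // Stays around ~1200 ms; with the buggy Lucene SimpleRateLimiter this sum was
        // an absolute nano timestamp and quickly overflowed writeVLong downstream.
        return TimeUnit.MILLISECONDS.convert(pausedNanos, TimeUnit.NANOSECONDS);
    }
}
```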
{ "body": "When I run a `has_child` query on 0.20.5, it is returning both the child and the parent:\n\n```\ncurl -XPUT 'http://127.0.0.1:9200/test/?pretty=1' -d '\n{\n \"mappings\" : {\n \"test\" : {\n \"_parent\" : {\n \"type\" : \"foo\"\n }\n }\n }\n}\n'\n\ncurl -XPUT 'http://127.0.0.1:9200/test/foo/1?pretty=1' -d '\n{\n \"foo\" : 1\n}\n'\n\n# {\n# \"ok\" : true,\n# \"_index\" : \"test\",\n# \"_id\" : \"1\",\n# \"_type\" : \"foo\",\n# \"_version\" : 1\n# }\n\ncurl -XPOST 'http://127.0.0.1:9200/test/test?parent=1&pretty=1' -d '\n{\n \"foo\" : 1\n}\n'\n\n# {\n# \"ok\" : true,\n# \"_index\" : \"test\",\n# \"_id\" : \"GBEoJRcGQnCDN93_JiUAKQ\",\n# \"_type\" : \"test\",\n# \"_version\" : 1\n# }\n\n\ncurl -XGET 'http://127.0.0.1:9200/test/_search?pretty=1' -d '\n{\n \"query\" : {\n \"has_child\" : {\n \"query\" : {\n \"match\" : {\n \"foo\" : 1\n }\n },\n \"type\" : \"test\"\n }\n }\n}\n'\n\n# {\n# \"hits\" : {\n# \"hits\" : [\n# {\n# \"_source\" : {\n# \"foo\" : 1\n# },\n# \"_score\" : 1,\n# \"_index\" : \"test\",\n# \"_id\" : \"1\",\n# \"_type\" : \"foo\"\n# },\n# {\n# \"_source\" : {\n# \"foo\" : 1\n# },\n# \"_score\" : 1,\n# \"_index\" : \"test\",\n# \"_id\" : \"GBEoJRcGQnCDN93_JiUAKQ\",\n# \"_type\" : \"test\"\n# }\n# ],\n# \"max_score\" : 1,\n# \"total\" : 2\n# },\n# \"timed_out\" : false,\n# \"_shards\" : {\n# \"failed\" : 0,\n# \"successful\" : 5,\n# \"total\" : 5\n# },\n# \"took\" : 3\n# }\n```\n", "comments": [ { "body": "this should only return the parent? Excuse my naive question ;) \n", "created_at": "2013-03-09T13:46:13Z" }, { "body": "Yes\n", "created_at": "2013-03-11T14:01:03Z" }, { "body": "ok ;) do you know if this happens on master too?\n", "created_at": "2013-03-11T14:53:07Z" } ], "number": 2744, "title": "has_child returns parent and child" }
{ "body": "Closes #2744\n", "number": 2760, "review_comments": [], "title": "Check if hit has a parent ID in collector" }
{ "commits": [ { "message": "Introdue ParentIdCollector that collects only if the parent ID is non null ie. if the document has a parent.\n\nCloses #2744" } ], "files": [ { "diff": "@@ -319,30 +319,25 @@ public int advance(int target) throws IOException {\n }\n }\n \n- static class ChildUidCollector extends NoopCollector {\n+ static class ChildUidCollector extends ParentIdCollector {\n \n final TObjectFloatHashMap<HashedBytesArray> uidToScore;\n final ScoreType scoreType;\n- final SearchContext searchContext;\n- final String childType;\n-\n Scorer scorer;\n- IdReaderTypeCache typeCache;\n \n ChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, TObjectFloatHashMap<HashedBytesArray> uidToScore) {\n+ super(childType, searchContext);\n this.uidToScore = uidToScore;\n this.scoreType = scoreType;\n- this.searchContext = searchContext;\n- this.childType = childType;\n }\n \n @Override\n- public void collect(int doc) throws IOException {\n- if (typeCache == null) {\n- return;\n- }\n+ public void setScorer(Scorer scorer) throws IOException {\n+ this.scorer = scorer;\n+ }\n \n- HashedBytesArray parentUid = typeCache.parentIdByDoc(doc);\n+ @Override\n+ protected void collect(int doc, HashedBytesArray parentUid) throws IOException {\n float previousScore = uidToScore.get(parentUid);\n float currentScore = scorer.score();\n if (previousScore == 0) {\n@@ -357,23 +352,19 @@ public void collect(int doc) throws IOException {\n uidToScore.put(parentUid, currentScore);\n }\n break;\n+ case AVG:\n+ assert false : \"AVG has it's own collector\";\n+ \n+ default:\n+ assert false : \"Are we missing a sore type here? -- \" + scoreType;\n+ break;\n }\n }\n }\n \n- @Override\n- public void setScorer(Scorer scorer) throws IOException {\n- this.scorer = scorer;\n- }\n-\n- @Override\n- public void setNextReader(AtomicReaderContext context) throws IOException {\n- typeCache = searchContext.idCache().reader(context.reader()).type(childType);\n- }\n-\n }\n \n- static class AvgChildUidCollector extends ChildUidCollector {\n+ final static class AvgChildUidCollector extends ChildUidCollector {\n \n final TObjectIntHashMap<HashedBytesArray> uidToCount;\n \n@@ -384,12 +375,7 @@ static class AvgChildUidCollector extends ChildUidCollector {\n }\n \n @Override\n- public void collect(int doc) throws IOException {\n- if (typeCache == null) {\n- return;\n- }\n-\n- HashedBytesArray parentUid = typeCache.parentIdByDoc(doc);\n+ protected void collect(int doc, HashedBytesArray parentUid) throws IOException {\n float previousScore = uidToScore.get(parentUid);\n float currentScore = scorer.score();\n if (previousScore == 0) {", "filename": "src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java", "status": "modified" }, { "diff": "@@ -102,7 +102,7 @@ public void contextClear() {\n collectedUids = null;\n }\n \n- static class ParentDocSet extends MatchDocIdSet {\n+ final static class ParentDocSet extends MatchDocIdSet {\n \n final IndexReader reader;\n final THashSet<HashedBytesArray> parents;\n@@ -121,33 +121,19 @@ protected boolean matchDoc(int doc) {\n }\n }\n \n- static class UidCollector extends NoopCollector {\n-\n- final String parentType;\n- final SearchContext context;\n- final THashSet<HashedBytesArray> collectedUids;\n-\n- private IdReaderTypeCache typeCache;\n+ final static class UidCollector extends ParentIdCollector {\n+ private final THashSet<HashedBytesArray> collectedUids;\n \n UidCollector(String parentType, SearchContext context, THashSet<HashedBytesArray> collectedUids) 
{\n- this.parentType = parentType;\n- this.context = context;\n+ super(parentType, context);\n this.collectedUids = collectedUids;\n }\n \n @Override\n- public void collect(int doc) throws IOException {\n- // It can happen that for particular segment no document exist for an specific type. This prevents NPE\n- if (typeCache != null) {\n- collectedUids.add(typeCache.parentIdByDoc(doc));\n- }\n-\n+ public void collect(int doc, HashedBytesArray parentIdByDoc){\n+ collectedUids.add(parentIdByDoc);\n }\n \n- @Override\n- public void setNextReader(AtomicReaderContext readerContext) throws IOException {\n- typeCache = context.idCache().reader(readerContext.reader()).type(parentType);\n- }\n }\n }\n }", "filename": "src/main/java/org/elasticsearch/index/search/child/HasChildFilter.java", "status": "modified" }, { "diff": "@@ -0,0 +1,59 @@\n+/*\n+ * Licensed to ElasticSearch and Shay Banon under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+package org.elasticsearch.index.search.child;\n+\n+import java.io.IOException;\n+\n+import org.apache.lucene.index.AtomicReaderContext;\n+import org.elasticsearch.common.bytes.HashedBytesArray;\n+import org.elasticsearch.common.lucene.search.NoopCollector;\n+import org.elasticsearch.index.cache.id.IdReaderTypeCache;\n+import org.elasticsearch.search.internal.SearchContext;\n+\n+/**\n+ * A simple collector that only collects if the docs parent ID is not\n+ * <code>null</code>\n+ */\n+abstract class ParentIdCollector extends NoopCollector {\n+ protected final String type;\n+ protected final SearchContext context;\n+ private IdReaderTypeCache typeCache;\n+\n+ protected ParentIdCollector(String parentType, SearchContext context) {\n+ this.type = parentType;\n+ this.context = context;\n+ }\n+\n+ @Override\n+ public final void collect(int doc) throws IOException {\n+ if (typeCache != null) {\n+ HashedBytesArray parentIdByDoc = typeCache.parentIdByDoc(doc);\n+ if (parentIdByDoc != null) {\n+ collect(doc, parentIdByDoc);\n+ }\n+ }\n+ }\n+ \n+ protected abstract void collect(int doc, HashedBytesArray parentId) throws IOException;\n+\n+ @Override\n+ public void setNextReader(AtomicReaderContext readerContext) throws IOException {\n+ typeCache = context.idCache().reader(readerContext.reader()).type(type);\n+ }\n+}", "filename": "src/main/java/org/elasticsearch/index/search/child/ParentIdCollector.java", "status": "added" }, { "diff": "@@ -19,6 +19,7 @@\n \n package org.elasticsearch.test.integration.search.child;\n \n+import org.elasticsearch.ElasticSearchException;\n import org.elasticsearch.action.count.CountResponse;\n import org.elasticsearch.action.search.SearchResponse;\n import org.elasticsearch.action.search.SearchType;\n@@ -35,6 +36,7 @@\n import org.testng.annotations.BeforeClass;\n import org.testng.annotations.Test;\n \n+import 
java.io.IOException;\n import java.util.ArrayList;\n import java.util.Arrays;\n import java.util.List;\n@@ -110,6 +112,33 @@ public void multiLevelChild() throws Exception {\n assertThat(\"Failures \" + Arrays.toString(searchResponse.getShardFailures()), searchResponse.getShardFailures().length, equalTo(0));\n assertThat(searchResponse.getHits().totalHits(), equalTo(1l));\n }\n+ \n+ \n+ @Test // see #2744\n+ public void test2744() throws ElasticSearchException, IOException {\n+ client.admin().indices().prepareDelete().execute().actionGet();\n+\n+ client.admin().indices().prepareCreate(\"test\")\n+ .setSettings(\n+ ImmutableSettings.settingsBuilder()\n+ .put(\"index.number_of_shards\", 1)\n+ .put(\"index.number_of_replicas\", 0)\n+ ).execute().actionGet();\n+ client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();\n+ client.admin().indices().preparePutMapping(\"test\").setType(\"test\").setSource(jsonBuilder().startObject().startObject(\"type\")\n+ .startObject(\"_parent\").field(\"type\", \"foo\").endObject()\n+ .endObject().endObject()).execute().actionGet();\n+\n+ // index simple data\n+ client.prepareIndex(\"test\", \"foo\", \"1\").setSource(\"foo\", 1).execute().actionGet();\n+ client.prepareIndex(\"test\", \"test\").setSource(\"foo\", 1).setParent(\"1\").execute().actionGet();\n+ client.admin().indices().prepareRefresh().execute().actionGet();\n+ SearchResponse searchResponse = client.prepareSearch(\"test\").setQuery(hasChildQuery(\"test\", matchQuery(\"foo\", 1))).execute().actionGet();\n+ assertThat(searchResponse.getFailedShards(), equalTo(0));\n+ assertThat(searchResponse.getHits().totalHits(), equalTo(1l));\n+ assertThat(searchResponse.getHits().getAt(0).id(), equalTo(\"1\"));\n+\n+ }\n \n @Test\n public void simpleChildQuery() throws Exception {", "filename": "src/test/java/org/elasticsearch/test/integration/search/child/SimpleChildQuerySearchTests.java", "status": "modified" } ] }
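A usage sketch based on the regression test added above (test2744): with the ParentIdCollector in place, a has_child query returns only the parent document, not the child hit itself as reported in the issue. The class name and the assumption of a connected Client with the parent/child mapping from the test are mine.

```
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;

// Hypothetical example class; assumes the parent/child mapping set up in the test above.
public class HasChildExample {

    // Should return only the parent document with id "1", never the child hit.
    public static SearchResponse parentsWithMatchingChild(Client client) {
        return client.prepareSearch("test")
                .setQuery(hasChildQuery("test", matchQuery("foo", 1)))
                .execute().actionGet();
    }
}
```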
{ "body": "In version `0.9.0.Beta1` there is an interchange between `field_data.memory_size` and `field_data.memory_size_in_bytes`.\n\nTo reproduce:\n1. curl `http://localhost:9200/_nodes/_local/stats?pretty=true`\n\nActual result:\n\n```\n \"field_data\" : {\n \"memory_size\" : 257199478,\n \"memory_size_in_bytes\" : \"245.2mb\"\n },\n```\n\nExpected result:\n\n```\n \"field_data\" : {\n \"memory_size\" : \"245.2mb\",\n \"memory_size_in_bytes\" : 257199478\n },\n```\n\nHere is a pull request: https://github.com/elasticsearch/elasticsearch/pull/2725\n", "comments": [ { "body": "makes perfect sense, thanks!\n", "created_at": "2013-03-04T10:24:46Z" }, { "body": "thanks @gakhov \n", "created_at": "2013-03-04T10:43:13Z" }, { "body": "I am going to change the `field_data` part to `fielddata`, though less readable it matches our configuration options that match it...\n", "created_at": "2013-03-04T22:49:11Z" }, { "body": "good to know ... right now we use that information for monitoring our clusters ... this change will require us to rewrite our monitoring plugins (for `munin`, for instance) to support 2 versions ... not big deal, but still ...\n", "created_at": "2013-03-05T09:11:13Z" }, { "body": "@gakhov thats why we are still not GA with it, I think its a good change if we want to be consistent across the board with how we name references to the `fielddata` component.\n", "created_at": "2013-03-06T01:16:33Z" } ], "number": 2724, "title": "Interchanged values in field_data stats" }
{ "body": "fixes issue #2724\n", "number": 2725, "review_comments": [], "title": "fixed interchanged values in field_data stats " }
{ "commits": [ { "message": "fixed interchanged values in field_data stats fixes #2724" } ], "files": [ { "diff": "@@ -83,8 +83,8 @@ public void writeTo(StreamOutput out) throws IOException {\n @Override\n public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {\n builder.startObject(Fields.FIELD_DATA);\n- builder.field(Fields.MEMORY_SIZE, memorySize);\n- builder.field(Fields.MEMORY_SIZE_IN_BYTES, getMemorySize().toString());\n+ builder.field(Fields.MEMORY_SIZE, getMemorySize().toString());\n+ builder.field(Fields.MEMORY_SIZE_IN_BYTES, memorySize);\n builder.field(Fields.EVICTIONS, getEvictions());\n builder.endObject();\n return builder;", "filename": "src/main/java/org/elasticsearch/index/fielddata/FieldDataStats.java", "status": "modified" } ] }
{ "body": "```\ncurl -XPUT 'http://127.0.0.1:9200/test/?pretty=1' -d '\n{\n \"mappings\" : {\n \"test\" : {\n \"_id\" : {\n \"path\" : \"foo.bar\"\n }\n }\n }\n}\n'\n```\n\nThis works correctly:\n\n```\ncurl -XPOST 'http://127.0.0.1:9200/test/test?pretty=1' -d '\n{\n \"foo\" : {\n \"bar\" : 1,\n \"baz\" : \"xx\"\n }\n}\n'\n\n# [Fri Sep 21 11:14:21 2012] Response:\n# {\n# \"ok\" : true,\n# \"_index\" : \"test\",\n# \"_id\" : \"1\",\n# \"_type\" : \"test\",\n# \"_version\" : 1\n# }\n```\n\nThis should throw an error, not set the `_id` to `[`:\n\n```\ncurl -XPOST 'http://127.0.0.1:9200/test/test?pretty=1' -d '\n{\n \"foo\" : {\n \"bar\" : [\n 2\n ],\n \"baz\" : \"xx\"\n }\n}\n'\n\n# [Fri Sep 21 11:14:23 2012] Response:\n# {\n# \"ok\" : true,\n# \"_index\" : \"test\",\n# \"_id\" : \"[\",\n# \"_type\" : \"test\",\n# \"_version\" : 1\n# }\n```\n", "comments": [], "number": 2275, "title": "The `_id` path should not allow arrays" }
{ "body": "Closes #2275\n", "number": 2715, "review_comments": [], "title": "Fail in metadata parsing if the id path is not a value but rather an array or an object." }
{ "commits": [ { "message": "Fail in metadata parsing if the id path is not a value but rather an array or an object.\n\nCloses #2275" } ], "files": [ { "diff": "@@ -33,6 +33,7 @@\n import org.elasticsearch.common.xcontent.XContentHelper;\n import org.elasticsearch.common.xcontent.XContentParser;\n import org.elasticsearch.index.mapper.DocumentMapper;\n+import org.elasticsearch.index.mapper.MapperParsingException;\n import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;\n \n import java.io.IOException;\n@@ -442,6 +443,9 @@ private void innerParse(XContentParser parser, ParseContext context) throws IOEx\n boolean incLocationTimestamp = false;\n if (context.idParsingStillNeeded() && fieldName.equals(idPart)) {\n if (context.locationId + 1 == id.pathElements().length) {\n+ if (!t.isValue()) {\n+ throw new MapperParsingException(\"id field must be a value but was either an object or an array\");\n+ }\n context.id = parser.textOrNull();\n context.idResolved = true;\n } else {", "filename": "src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java", "status": "modified" }, { "diff": "@@ -22,6 +22,7 @@\n import org.elasticsearch.cluster.metadata.MappingMetaData;\n import org.elasticsearch.common.compress.CompressedString;\n import org.elasticsearch.common.xcontent.XContentFactory;\n+import org.elasticsearch.index.mapper.MapperParsingException;\n import org.testng.annotations.Test;\n \n import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;\n@@ -49,6 +50,33 @@ public void testParseIdAlone() throws Exception {\n assertThat(parseContext.timestamp(), nullValue());\n assertThat(parseContext.timestampResolved(), equalTo(false));\n }\n+ \n+ @Test\n+ public void testFailIfIdIsNoValue() throws Exception {\n+ MappingMetaData md = new MappingMetaData(\"type1\", new CompressedString(\"\"),\n+ new MappingMetaData.Id(\"id\"),\n+ new MappingMetaData.Routing(true, \"routing\"),\n+ new MappingMetaData.Timestamp(true, \"timestamp\", \"dateOptionalTime\"));\n+ byte[] bytes = jsonBuilder().startObject().field(\"field1\", \"value1\").field(\"field2\", \"value2\")\n+ .startArray(\"id\").value(\"id\").endArray().field(\"routing\", \"routing_value\").field(\"timestamp\", \"1\").endObject().bytes().toBytes();\n+ MappingMetaData.ParseContext parseContext = md.createParseContext(null, \"routing_value\", \"1\");\n+ try {\n+ md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);\n+ assert false;\n+ } catch (MapperParsingException ex) {\n+ // bogus its an array\n+ }\n+ \n+ bytes = jsonBuilder().startObject().field(\"field1\", \"value1\").field(\"field2\", \"value2\")\n+ .startObject(\"id\").field(\"x\", \"id\").endObject().field(\"routing\", \"routing_value\").field(\"timestamp\", \"1\").endObject().bytes().toBytes();\n+ parseContext = md.createParseContext(null, \"routing_value\", \"1\");\n+ try {\n+ md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);\n+ assert false;\n+ } catch (MapperParsingException ex) {\n+ // bogus its an object\n+ }\n+ }\n \n @Test\n public void testParseRoutingAlone() throws Exception {", "filename": "src/test/java/org/elasticsearch/test/unit/cluster/metadata/MappingMetaDataParserTests.java", "status": "modified" } ] }
{ "body": "In the query_string query, `lowercase_expanded_terms` applies to wildcards, but not to fuzzy terms:\n\n```\ncurl -XGET 'http://127.0.0.1:9200/test/test/_validate/query?pretty=1&explain=true' -d '\n{\n \"field\" : {\n \"t\" : {\n \"query\" : \"full text Saerch~2 Wild*\",\n \"default_operator\" : \"AND\"\n }\n }\n}\n'\n\n# {\n# \"_shards\" : {\n# \"failed\" : 0,\n# \"successful\" : 1,\n# \"total\" : 1\n# },\n# \"explanations\" : [\n# {\n# \"index\" : \"test\",\n# \"explanation\" : \"+t:full +t:text +t:Saerch~2 +t:wild*\",\n# \"valid\" : true\n# }\n# ],\n# \"valid\" : true\n# }\n```\n\nNote: it isn't just validate that shows this - it is borne out in tests\n", "comments": [], "number": 2566, "title": "Make lowercase_expanded_terms apply to fuzzy words in query_string" }
{ "body": "Fixes #2566\n", "number": 2567, "review_comments": [], "title": "Respect lowercase_expanded_terms in MappingQueryParser" }
{ "commits": [ { "message": "Respect lowercase_expanded_terms in MappingQueryParser\n\nFixes #2566" } ], "files": [ { "diff": "@@ -294,6 +294,10 @@ protected Query getRangeQuery(String field, String part1, String part2, boolean\n if (\"*\".equals(part2)) {\n part2 = null;\n }\n+ if (lowercaseExpandedTerms) {\n+ part1 = part1==null ? null : part1.toLowerCase(locale);\n+ part2 = part2==null ? null : part2.toLowerCase(locale);\n+ }\n Collection<String> fields = extractMultiFields(field);\n if (fields != null) {\n if (fields.size() == 1) {\n@@ -354,6 +358,9 @@ private Query getRangeQuerySingle(String field, String part1, String part2, bool\n \n @Override\n protected Query getFuzzyQuery(String field, String termStr, float minSimilarity) throws ParseException {\n+ if (lowercaseExpandedTerms) {\n+ termStr = termStr.toLowerCase(locale);\n+ }\n Collection<String> fields = extractMultiFields(field);\n if (fields != null) {\n if (fields.size() == 1) {\n@@ -421,6 +428,9 @@ protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLeng\n \n @Override\n protected Query getPrefixQuery(String field, String termStr) throws ParseException {\n+ if (lowercaseExpandedTerms) {\n+ termStr = termStr.toLowerCase(locale);\n+ }\n Collection<String> fields = extractMultiFields(field);\n if (fields != null) {\n if (fields.size() == 1) {\n@@ -569,6 +579,9 @@ protected Query getWildcardQuery(String field, String termStr) throws ParseExcep\n return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(parseContext, actualField);\n }\n }\n+ if (lowercaseExpandedTerms) {\n+ termStr = termStr.toLowerCase(locale);\n+ }\n Collection<String> fields = extractMultiFields(field);\n if (fields != null) {\n if (fields.size() == 1) {", "filename": "src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java", "status": "modified" }, { "diff": "@@ -160,6 +160,34 @@ public void queryStringAnalyzedWildcard() throws Exception {\n searchResponse = client.prepareSearch().setQuery(queryString(\"v?l*e?1\").analyzeWildcard(true)).execute().actionGet();\n assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n }\n+ \n+ @Test\n+ public void testLowercaseExpandedTerms() {\n+ try {\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+\n+ client.admin().indices().prepareCreate(\"test\").setSettings(ImmutableSettings.settingsBuilder().put(\"number_of_shards\", 1)).execute().actionGet();\n+\n+ client.prepareIndex(\"test\", \"type1\", \"1\").setSource(\"field1\", \"value_1\", \"field2\", \"value_2\").execute().actionGet();\n+\n+ client.admin().indices().prepareRefresh().execute().actionGet();\n+\n+ SearchResponse searchResponse = client.prepareSearch().setQuery(queryString(\"VALUE_3~1\").lowercaseExpandedTerms(true)).execute().actionGet();\n+ assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n+ searchResponse = client.prepareSearch().setQuery(queryString(\"VALUE_3~1\").lowercaseExpandedTerms(false)).execute().actionGet();\n+ assertThat(searchResponse.hits().totalHits(), equalTo(0l));\n+ searchResponse = client.prepareSearch().setQuery(queryString(\"ValUE_*\").lowercaseExpandedTerms(true)).execute().actionGet();\n+ assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n+ searchResponse = client.prepareSearch().setQuery(queryString(\"vAl*E_1\")).execute().actionGet();\n+ assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n+ searchResponse = client.prepareSearch().setQuery(queryString(\"[VALUE_1 TO 
VALUE_3]\")).execute().actionGet();\n+ assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n+ searchResponse = client.prepareSearch().setQuery(queryString(\"[VALUE_1 TO VALUE_3]\").lowercaseExpandedTerms(false)).execute().actionGet();\n+ assertThat(searchResponse.hits().totalHits(), equalTo(0l));\n+ }\n \n @Test\n public void typeFilterTypeIndexedTests() throws Exception {", "filename": "src/test/java/org/elasticsearch/test/integration/search/query/SimpleQueryTests.java", "status": "modified" }, { "diff": "@@ -93,7 +93,7 @@ public void simpleIdTests() {\n searchResponse = client.prepareSearch().setQuery(QueryBuilders.prefixQuery(\"_id\", \"XXX\")).execute().actionGet();\n assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n \n- searchResponse = client.prepareSearch().setQuery(QueryBuilders.queryString(\"_id:XXX*\")).execute().actionGet();\n+ searchResponse = client.prepareSearch().setQuery(QueryBuilders.queryString(\"_id:XXX*\").lowercaseExpandedTerms(false)).execute().actionGet();\n assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n }\n ", "filename": "src/test/java/org/elasticsearch/test/integration/search/simple/SimpleSearchTests.java", "status": "modified" } ] }
{ "body": "Repro: https://gist.github.com/4165996\n\nExamples of the words that should be stemmed by [Porter](http://snowball.tartarus.org/algorithms/porter/stemmer.html) and [Porter2](http://snowball.tartarus.org/algorithms/english/stemmer.html) stemmers differently \n\n```\ninput porter porter2\n----------- ----------- -------\nconsolingly consolingli consol\nhis hi his\nknightly knightli knight\nstayed stai stay\n```\n\nSee also: https://groups.google.com/d/topic/elasticsearch/HEW3Q9F4ocM/discussion\n", "comments": [ { "body": "I think you have it right. Currently (without the fix) the `porter` and `porter2` stemmers map to the `porter` stemmer. The `english` stemmer maps to the `porter2` stemmer.\n\nI also did a bit more investigation and I think the lovins stemmer may have a problem too. The proper output for the lovins stemmer is: 'consol', 'hi', 'knight', 'stay', however I remember getting something different. I'll test it out and let you know.\n", "created_at": "2012-11-29T03:06:46Z" }, { "body": "After some discussions we came to the conclusion that it would be safer to just [remove reference](https://github.com/elasticsearch/elasticsearch.github.com/commit/288dccaa0c22637b93ae7909f061aad2840f2c3a) to the `porter2` stemmer from documentation. Changing stemmer in elasticsearch might adversely affect users who are currently using it. Whoever really needs the `porter2` stemmer can simply use the `english` stemmer instead. \n", "created_at": "2012-12-03T16:08:23Z" }, { "body": "That seems like the right move. If you could add a sentence explaining that `english` is implemented by the `porter2` stemmer, that would be nice.\n\nAs I am sure you already know, the reason it's important to be clear about the implementation is because sometimes you have to do stemming on the ES client side and you need to be sure that the client stemmer matches the ES stemmer.\n", "created_at": "2012-12-03T18:23:55Z" }, { "body": "I [added links](https://github.com/elasticsearch/elasticsearch.github.com/commit/870121f8dc7c3f3282079269b3bcbe5b655c101e) to stemming algorithms. Closing this issue. \n", "created_at": "2013-02-09T19:37:24Z" } ], "number": 2451, "title": "The Porter2 Stemmer Token Filter is just Porter Stemmer" }
{ "body": "...er Stemmer\n\nFixes #2451\n", "number": 2452, "review_comments": [], "title": " Porter2 Stemmer Token Filter should use English stemmer instead of Port..." }
{ "commits": [ { "message": " Porter2 Stemmer Token Filter should use English stemmer instead of Porter Stemmer\n\n Fixes #2451" } ], "files": [ { "diff": "@@ -113,7 +113,7 @@ public TokenStream create(TokenStream tokenStream) {\n } else if (\"porter\".equalsIgnoreCase(language)) {\n return new PorterStemFilter(tokenStream);\n } else if (\"porter2\".equalsIgnoreCase(language)) {\n- return new SnowballFilter(tokenStream, new PorterStemmer());\n+ return new SnowballFilter(tokenStream, new EnglishStemmer());\n } else if (\"portuguese\".equalsIgnoreCase(language)) {\n return new SnowballFilter(tokenStream, new PortugueseStemmer());\n } else if (\"romanian\".equalsIgnoreCase(language)) {", "filename": "src/main/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactory.java", "status": "modified" }, { "diff": "@@ -0,0 +1,70 @@\n+/*\n+ * Licensed to ElasticSearch and Shay Banon under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+\n+package org.elasticsearch.test.unit.index.analysis;\n+\n+import org.elasticsearch.common.inject.Injector;\n+import org.elasticsearch.common.inject.ModulesBuilder;\n+import org.elasticsearch.common.settings.Settings;\n+import org.elasticsearch.common.settings.SettingsModule;\n+import org.elasticsearch.env.Environment;\n+import org.elasticsearch.env.EnvironmentModule;\n+import org.elasticsearch.index.Index;\n+import org.elasticsearch.index.IndexNameModule;\n+import org.elasticsearch.index.analysis.AnalysisModule;\n+import org.elasticsearch.index.analysis.AnalysisService;\n+import org.elasticsearch.index.analysis.NamedAnalyzer;\n+import org.elasticsearch.index.settings.IndexSettingsModule;\n+import org.elasticsearch.indices.analysis.IndicesAnalysisModule;\n+import org.elasticsearch.indices.analysis.IndicesAnalysisService;\n+import org.testng.annotations.Test;\n+\n+import java.io.StringReader;\n+\n+import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;\n+import static org.elasticsearch.test.unit.index.analysis.AnalysisTestsHelper.assertSimpleTSOutput;\n+\n+/**\n+ */\n+public class PorterTokenFilterTests {\n+\n+ @Test\n+ public void testPorter2Filter() throws Exception {\n+ Index index = new Index(\"test\");\n+ Settings settings = settingsBuilder().loadFromClasspath(\"org/elasticsearch/test/unit/index/analysis/porter.json\").build();\n+ Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();\n+ Injector injector = new ModulesBuilder().add(\n+ new IndexSettingsModule(index, settings),\n+ new IndexNameModule(index),\n+ new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class)))\n+ .createChildInjector(parentInjector);\n+\n+ AnalysisService analysisService = 
injector.getInstance(AnalysisService.class);\n+\n+ NamedAnalyzer analyzer1 = analysisService.analyzer(\"porter1\");\n+\n+ // http://snowball.tartarus.org/algorithms/porter/stemmer.html\n+ assertSimpleTSOutput(analyzer1.tokenStream(\"test\", new StringReader(\"consolingly his knightly stayed\")), new String[]{\"consolingli\", \"hi\", \"knightli\", \"stai\"});\n+\n+ NamedAnalyzer analyzer2 = analysisService.analyzer(\"porter2\");\n+ // http://snowball.tartarus.org/algorithms/english/stemmer.html\n+ assertSimpleTSOutput(analyzer2.tokenStream(\"test\", new StringReader(\"consolingly his knightly stayed\")), new String[]{\"consol\", \"his\", \"knight\", \"stay\"});\n+ }\n+\n+}", "filename": "src/test/java/org/elasticsearch/test/unit/index/analysis/PorterTokenFilterTests.java", "status": "added" }, { "diff": "@@ -0,0 +1,26 @@\n+{\n+ \"index\":{\n+ \"analysis\":{\n+ \"analyzer\":{\n+ \"porter1\":{\n+ \"tokenizer\":\"standard\",\n+ \"filter\":[\"lowercase\", \"porter1\"]\n+ },\n+ \"porter2\":{\n+ \"tokenizer\":\"standard\",\n+ \"filter\":[\"lowercase\", \"porter2\"]\n+ }\n+ },\n+ \"filter\":{\n+ \"porter1\":{\n+ \"type\":\"stemmer\",\n+ \"name\":\"porter\"\n+ },\n+ \"porter2\":{\n+ \"type\":\"stemmer\",\n+ \"name\":\"porter2\"\n+ }\n+ }\n+ }\n+ }\n+}\n\\ No newline at end of file", "filename": "src/test/java/org/elasticsearch/test/unit/index/analysis/porter.json", "status": "added" } ] }
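A quick way to see why the one-line change matters is to run both snowball programs on the words from the issue's table. This assumes Lucene's snowball stemmers (`org.tartarus.snowball.ext.PorterStemmer` and `EnglishStemmer`, the same classes the diff swaps) are on the classpath; otherwise treat it as an illustration of the expected outputs:

```java
import org.tartarus.snowball.SnowballProgram;
import org.tartarus.snowball.ext.EnglishStemmer;
import org.tartarus.snowball.ext.PorterStemmer;

// Compare the Porter and Porter2 (English) snowball stemmers directly.
public class StemmerComparison {

    static String stem(SnowballProgram stemmer, String word) {
        stemmer.setCurrent(word);
        stemmer.stem();
        return stemmer.getCurrent();
    }

    public static void main(String[] args) {
        String[] words = {"consolingly", "his", "knightly", "stayed"};
        SnowballProgram porter = new PorterStemmer();    // what "porter2" wrongly mapped to
        SnowballProgram english = new EnglishStemmer();  // what "porter2" should map to
        for (String word : words) {
            System.out.printf("%-12s porter=%-12s porter2=%s%n",
                    word, stem(porter, word), stem(english, word));
        }
    }
}
```

The expected outputs match the issue's table (consolingli/consol, hi/his, knightli/knight, stai/stay), which is exactly what the added analyzer test asserts.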
{ "body": "Repro: https://gist.github.com/fa7219f4f7fb8d9435ad\n", "comments": [], "number": 2441, "title": "The relevancy score in explanation of custom_filters_query doesn’t match the actual score" }
{ "body": "...om_filters_query\n\nFixes #2441\n", "number": 2442, "review_comments": [], "title": "The relevancy score in explanation should match the actual score in cust..." }
{ "commits": [ { "message": "The relevancy score in explanation should match the actual score in custom_filters_query\n\nFixes #2441" } ], "files": [ { "diff": "@@ -164,7 +164,11 @@ public Explanation explain(AtomicReaderContext context, int doc) throws IOExcept\n if (docSet.get(doc)) {\n filterFunction.function.setNextReader(context);\n Explanation functionExplanation = filterFunction.function.explainFactor(doc);\n- float sc = getBoost() * subQueryExpl.getValue() * functionExplanation.getValue();\n+ float factor = functionExplanation.getValue();\n+ if (factor > maxBoost) {\n+ factor = maxBoost;\n+ }\n+ float sc = getBoost() * factor;\n Explanation filterExplanation = new ComplexExplanation(true, sc, \"custom score, product of:\");\n filterExplanation.addDetail(new Explanation(1.0f, \"match filter: \" + filterFunction.filter.toString()));\n filterExplanation.addDetail(functionExplanation);", "filename": "src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java", "status": "modified" }, { "diff": "@@ -84,7 +84,7 @@ public void testScoreExplainBug_2283() throws Exception {\n .setQuery(customFiltersScoreQuery(matchAllQuery())\n .add(termFilter(\"field\", \"value4\"), \"2\")\n .add(termFilter(\"field\", \"value2\"), \"3\")\n- .scoreMode(\"first\"))\n+ .scoreMode(\"first\"))\n .setExplain(true)\n .execute().actionGet();\n \n@@ -114,7 +114,7 @@ public void testScoreExplainBug_2283() throws Exception {\n .add(termFilter(\"field\", \"value4\"), \"2\")\n .add(termFilter(\"field\", \"value2\"), \"3\")\n .boost(2)\n- .scoreMode(\"first\"))\n+ .scoreMode(\"first\"))\n .setExplain(true)\n .execute().actionGet();\n \n@@ -364,5 +364,48 @@ public void testCustomFiltersScore() throws Exception {\n assertThat(searchResponse.hits().getAt(2).score(), equalTo(2.0f));\n assertThat(searchResponse.hits().getAt(3).id(), equalTo(\"2\"));\n assertThat(searchResponse.hits().getAt(3).score(), equalTo(1.0f));\n+\n+ searchResponse = client.prepareSearch(\"test\")\n+ .setQuery(customFiltersScoreQuery(termsQuery(\"field\", \"value1\", \"value2\", \"value3\", \"value4\")).scoreMode(\"first\")\n+ .add(termFilter(\"field\", \"value4\"), 2)\n+ .add(termFilter(\"field\", \"value3\"), 3)\n+ .add(termFilter(\"field\", \"value2\"), 4))\n+ .setExplain(true)\n+ .execute().actionGet();\n+\n+ assertThat(Arrays.toString(searchResponse.shardFailures()), searchResponse.failedShards(), equalTo(0));\n+ assertThat(searchResponse.hits().totalHits(), equalTo(4l));\n+ assertThat(searchResponse.hits().getAt(0).id(), equalTo(\"2\"));\n+ assertThat(searchResponse.hits().getAt(0).score(), equalTo(searchResponse.hits().getAt(0).explanation().getValue()));\n+ logger.info(\"--> Hit[0] {} Explanation {}\", searchResponse.hits().getAt(0).id(), searchResponse.hits().getAt(0).explanation());\n+ assertThat(searchResponse.hits().getAt(1).id(), equalTo(\"3\"));\n+ assertThat(searchResponse.hits().getAt(1).score(), equalTo(searchResponse.hits().getAt(1).explanation().getValue()));\n+ assertThat(searchResponse.hits().getAt(2).id(), equalTo(\"4\"));\n+ assertThat(searchResponse.hits().getAt(2).score(), equalTo(searchResponse.hits().getAt(2).explanation().getValue()));\n+ assertThat(searchResponse.hits().getAt(3).id(), equalTo(\"1\"));\n+ assertThat(searchResponse.hits().getAt(3).score(), equalTo(searchResponse.hits().getAt(3).explanation().getValue()));\n+\n+\n+ searchResponse = client.prepareSearch(\"test\")\n+ .setQuery(customFiltersScoreQuery(termsQuery(\"field\", \"value1\", \"value2\", \"value3\", 
\"value4\")).scoreMode(\"multiply\")\n+ .add(termFilter(\"field\", \"value4\"), 2)\n+ .add(termFilter(\"field\", \"value1\"), 3)\n+ .add(termFilter(\"color\", \"red\"), 5))\n+ .setExplain(true)\n+ .execute().actionGet();\n+\n+ assertThat(Arrays.toString(searchResponse.shardFailures()), searchResponse.failedShards(), equalTo(0));\n+ assertThat(searchResponse.hits().totalHits(), equalTo(4l));\n+ assertThat(searchResponse.hits().getAt(0).id(), equalTo(\"1\"));\n+ assertThat(searchResponse.hits().getAt(0).score(), equalTo(searchResponse.hits().getAt(0).explanation().getValue()));\n+ logger.info(\"--> Hit[0] {} Explanation {}\", searchResponse.hits().getAt(0).id(), searchResponse.hits().getAt(0).explanation());\n+ assertThat(searchResponse.hits().getAt(1).id(), equalTo(\"3\"));\n+ assertThat(searchResponse.hits().getAt(1).score(), equalTo(searchResponse.hits().getAt(1).explanation().getValue()));\n+ assertThat(searchResponse.hits().getAt(2).id(), equalTo(\"4\"));\n+ assertThat(searchResponse.hits().getAt(2).score(), equalTo(searchResponse.hits().getAt(2).explanation().getValue()));\n+ assertThat(searchResponse.hits().getAt(3).id(), equalTo(\"2\"));\n+ assertThat(searchResponse.hits().getAt(3).score(), equalTo(searchResponse.hits().getAt(3).explanation().getValue()));\n+\n+\n }\n }\n\\ No newline at end of file", "filename": "src/test/java/org/elasticsearch/test/integration/search/customscore/CustomScoreSearchTests.java", "status": "modified" } ] }
{ "body": "I accidentally use boost syntax in a field of prefix query and got a NullPointerException.\n\nWhen doing the same with match query i simply got an empty answer so i suppose the behavior is not expected.\n\nQuery and stack trace are in the following gist ... https://gist.github.com/4059732\n\nES version is 0.19.10\n\nRegards.\n\nBenoît\n", "comments": [], "number": 2408, "title": "NullPointerException with prefix query" }
{ "body": "...eption\n\nFixes #2408\n", "number": 2409, "review_comments": [], "title": "Using non-mapped fields in prefix queries shouldn't cause NullPointerExc..." }
{ "commits": [ { "message": "Using non-mapped fields in prefix queries shouldn't cause NullPointerException\n\nFixes #2408" } ], "files": [ { "diff": "@@ -109,7 +109,9 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars\n }\n if (query == null) {\n PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, value));\n- prefixQuery.setRewriteMethod(method);\n+ if (method != null) {\n+ prefixQuery.setRewriteMethod(method);\n+ }\n query = prefixQuery;\n }\n query.setBoost(boost);", "filename": "src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java", "status": "modified" }, { "diff": "@@ -617,6 +617,16 @@ public void testPrefixQueryBoostQuery() throws IOException {\n assertThat((double) prefixQuery.getBoost(), closeTo(2.0, 0.01));\n }\n \n+ @Test\n+ public void testPrefixQueryWithUnknownField() throws IOException {\n+ IndexQueryParserService queryParser = queryParser();\n+ Query parsedQuery = queryParser.parse(prefixQuery(\"unknown\", \"sh\")).query();\n+ assertThat(parsedQuery, instanceOf(PrefixQuery.class));\n+ PrefixQuery prefixQuery = (PrefixQuery) parsedQuery;\n+ assertThat(prefixQuery.getPrefix(), equalTo(new Term(\"unknown\", \"sh\")));\n+ assertThat(prefixQuery.getRewriteMethod(), notNullValue());\n+ }\n+\n @Test\n public void testWildcardQueryBuilder() throws IOException {\n IndexQueryParserService queryParser = queryParser();", "filename": "src/test/java/org/elasticsearch/test/unit/index/query/SimpleIndexQueryParserTests.java", "status": "modified" } ] }
{ "body": "The new settings `index.routing.allocation.require....` and `cluster.routing.allocation.require....` allow to specify conditions, all of which have to be satisfied for a shard to be allocation on a given node. \n\nThis commit introduces the following algorithm for determining if a shard can be allocated on a node. For allocation to be allowed the node has to satisfy ALL of the following conditions:\n- if the `index.routing.allocation.require....` or `cluster.routing.allocation.require....` lists are not empty, all filters must match the node\n- if the `index.routing.allocation.include....` or `cluster.routing.allocation.include....` lists are not empty, at list one filter must match the node\n- if the `index.routing.allocation.exclude....` or `cluster.routing.allocation.exclude....` lists are not empty, no filters must match the node\n\nDue to a bug in the elasticsearch this commit introduces two potentially breaking changes. Prior to this commit, elasticsearch was looking only at the first setting in include and exclude groups. All other settings were essentially ignored. After this change all include and exclude settings will be considered. \n\nThe second breaking change is caused by difference in treatment of space in `index.routing.allocation.exclude....` or `cluster.routing.allocation.exclude....` settings. Prior to this commit, space was matching any node, after this commit space will remove corresponding `index.routing.allocation.exclude....` or `cluster.routing.allocation.exclude....` setting from consideration.\n", "comments": [], "number": 2404, "title": "Shard Allocation: add index.routing.allocation.require.... and cluster.routing.allocation.require.... setting" }
{ "body": "…ation.require....` settings\n\nFixes #2404\n", "number": 2392, "review_comments": [], "title": "Add `index.routing.allocation.require....` and `cluster.routing.alloc… " }
{ "commits": [ { "message": "Add `index.routing.allocation.require....` and `cluster.routing.allocation.require....` settings\n\nFixes #2404" } ], "files": [ { "diff": "@@ -47,6 +47,7 @@\n import java.io.IOException;\n import java.util.*;\n \n+import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.*;\n import static org.elasticsearch.common.settings.ImmutableSettings.*;\n \n /**\n@@ -197,6 +198,7 @@ public static State fromString(String state) {\n \n private transient final int totalNumberOfShards;\n \n+ private final DiscoveryNodeFilters requireFilters;\n private final DiscoveryNodeFilters includeFilters;\n private final DiscoveryNodeFilters excludeFilters;\n \n@@ -213,17 +215,23 @@ private IndexMetaData(String index, long version, State state, Settings settings\n \n this.aliases = aliases;\n \n+ ImmutableMap<String, String> requireMap = settings.getByPrefix(\"index.routing.allocation.require.\").getAsMap();\n+ if (requireMap.isEmpty()) {\n+ requireFilters = null;\n+ } else {\n+ requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap);\n+ }\n ImmutableMap<String, String> includeMap = settings.getByPrefix(\"index.routing.allocation.include.\").getAsMap();\n if (includeMap.isEmpty()) {\n includeFilters = null;\n } else {\n- includeFilters = DiscoveryNodeFilters.buildFromKeyValue(includeMap);\n+ includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap);\n }\n ImmutableMap<String, String> excludeMap = settings.getByPrefix(\"index.routing.allocation.exclude.\").getAsMap();\n if (excludeMap.isEmpty()) {\n excludeFilters = null;\n } else {\n- excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(excludeMap);\n+ excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap);\n }\n }\n \n@@ -332,6 +340,11 @@ public <T extends Custom> T custom(String type) {\n return (T) customs.get(type);\n }\n \n+ @Nullable\n+ public DiscoveryNodeFilters requireFilters() {\n+ return requireFilters;\n+ }\n+\n @Nullable\n public DiscoveryNodeFilters includeFilters() {\n return includeFilters;", "filename": "src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java", "status": "modified" }, { "diff": "@@ -19,7 +19,6 @@\n \n package org.elasticsearch.cluster.node;\n \n-import com.google.common.collect.ImmutableMap;\n import org.elasticsearch.common.Strings;\n import org.elasticsearch.common.regex.Regex;\n import org.elasticsearch.common.settings.Settings;\n@@ -32,13 +31,16 @@\n */\n public class DiscoveryNodeFilters {\n \n- public static final DiscoveryNodeFilters NO_FILTERS = new DiscoveryNodeFilters(ImmutableMap.<String, String[]>of());\n+ public static enum OpType {\n+ AND,\n+ OR\n+ };\n \n- public static DiscoveryNodeFilters buildFromSettings(String prefix, Settings settings) {\n- return buildFromKeyValue(settings.getByPrefix(prefix).getAsMap());\n+ public static DiscoveryNodeFilters buildFromSettings(OpType opType, String prefix, Settings settings) {\n+ return buildFromKeyValue(opType, settings.getByPrefix(prefix).getAsMap());\n }\n \n- public static DiscoveryNodeFilters buildFromKeyValue(Map<String, String> filters) {\n+ public static DiscoveryNodeFilters buildFromKeyValue(OpType opType, Map<String, String> filters) {\n Map<String, String[]> bFilters = new HashMap<String, String[]>();\n for (Map.Entry<String, String> entry : filters.entrySet()) {\n String[] values = Strings.splitStringByCommaToArray(entry.getValue());\n@@ -47,76 +49,123 @@ public static DiscoveryNodeFilters buildFromKeyValue(Map<String, String> filters\n }\n }\n if 
(bFilters.isEmpty()) {\n- return NO_FILTERS;\n+ return null;\n }\n- return new DiscoveryNodeFilters(bFilters);\n+ return new DiscoveryNodeFilters(opType, bFilters);\n }\n \n private final Map<String, String[]> filters;\n \n- DiscoveryNodeFilters(Map<String, String[]> filters) {\n+ private final OpType opType;\n+\n+ DiscoveryNodeFilters(OpType opType, Map<String, String[]> filters) {\n+ this.opType = opType;\n this.filters = filters;\n }\n \n public boolean match(DiscoveryNode node) {\n- if (filters.isEmpty()) {\n- return true;\n- }\n for (Map.Entry<String, String[]> entry : filters.entrySet()) {\n String attr = entry.getKey();\n String[] values = entry.getValue();\n if (\"_ip\".equals(attr)) {\n if (!(node.address() instanceof InetSocketTransportAddress)) {\n- return false;\n+ if (opType == OpType.AND) {\n+ return false;\n+ } else {\n+ continue;\n+ }\n }\n InetSocketTransportAddress inetAddress = (InetSocketTransportAddress) node.address();\n for (String value : values) {\n if (Regex.simpleMatch(value, inetAddress.address().getAddress().getHostAddress())) {\n- return true;\n+ if (opType == OpType.OR) {\n+ return true;\n+ }\n+ } else {\n+ if (opType == OpType.AND) {\n+ return false;\n+ }\n }\n }\n- return false;\n } else if (\"_host\".equals(attr)) {\n if (!(node.address() instanceof InetSocketTransportAddress)) {\n- return false;\n+ if (opType == OpType.AND) {\n+ return false;\n+ } else {\n+ continue;\n+ }\n }\n InetSocketTransportAddress inetAddress = (InetSocketTransportAddress) node.address();\n for (String value : values) {\n if (Regex.simpleMatch(value, inetAddress.address().getHostName())) {\n- return true;\n+ if (opType == OpType.OR) {\n+ return true;\n+ }\n+ } else {\n+ if (opType == OpType.AND) {\n+ return false;\n+ }\n }\n if (Regex.simpleMatch(value, inetAddress.address().getAddress().getHostAddress())) {\n- return true;\n+ if (opType == OpType.OR) {\n+ return true;\n+ }\n+ } else {\n+ if (opType == OpType.AND) {\n+ return false;\n+ }\n }\n }\n- return false;\n } else if (\"_id\".equals(attr)) {\n for (String value : values) {\n if (node.id().equals(value)) {\n- return true;\n+ if (opType == OpType.OR) {\n+ return true;\n+ }\n+ } else {\n+ if (opType == OpType.AND) {\n+ return false;\n+ }\n }\n }\n- return false;\n } else if (\"_name\".equals(attr) || \"name\".equals(attr)) {\n for (String value : values) {\n if (Regex.simpleMatch(value, node.name())) {\n- return true;\n+ if (opType == OpType.OR) {\n+ return true;\n+ }\n+ } else {\n+ if (opType == OpType.AND) {\n+ return false;\n+ }\n }\n }\n- return false;\n } else {\n String nodeAttributeValue = node.attributes().get(attr);\n if (nodeAttributeValue == null) {\n- return false;\n+ if (opType == OpType.AND) {\n+ return false;\n+ } else {\n+ continue;\n+ }\n }\n for (String value : values) {\n if (Regex.simpleMatch(value, nodeAttributeValue)) {\n- return true;\n+ if (opType == OpType.OR) {\n+ return true;\n+ }\n+ } else {\n+ if (opType == OpType.AND) {\n+ return false;\n+ }\n }\n }\n- return false;\n }\n }\n- return true;\n+ if (opType == OpType.OR) {\n+ return false;\n+ } else {\n+ return true;\n+ }\n }\n }", "filename": "src/main/java/org/elasticsearch/cluster/node/DiscoveryNodeFilters.java", "status": "modified" }, { "diff": "@@ -30,38 +30,49 @@\n import org.elasticsearch.common.settings.Settings;\n import org.elasticsearch.node.settings.NodeSettingsService;\n \n+import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.*;\n+\n /**\n */\n public class FilterAllocationDecider extends AllocationDecider {\n \n 
static {\n MetaData.addDynamicSettings(\n+ \"cluster.routing.allocation.require.*\",\n \"cluster.routing.allocation.include.*\",\n \"cluster.routing.allocation.exclude.*\"\n );\n IndexMetaData.addDynamicSettings(\n+ \"index.routing.allocation.require.*\",\n \"index.routing.allocation.include.*\",\n \"index.routing.allocation.exclude.*\"\n );\n }\n \n+ private volatile DiscoveryNodeFilters clusterRequireFilters;\n private volatile DiscoveryNodeFilters clusterIncludeFilters;\n private volatile DiscoveryNodeFilters clusterExcludeFilters;\n \n @Inject\n public FilterAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) {\n super(settings);\n+ ImmutableMap<String, String> requireMap = settings.getByPrefix(\"cluster.routing.allocation.require.\").getAsMap();\n+ if (requireMap.isEmpty()) {\n+ clusterRequireFilters = null;\n+ } else {\n+ clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap);\n+ }\n ImmutableMap<String, String> includeMap = settings.getByPrefix(\"cluster.routing.allocation.include.\").getAsMap();\n if (includeMap.isEmpty()) {\n clusterIncludeFilters = null;\n } else {\n- clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(includeMap);\n+ clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap);\n }\n ImmutableMap<String, String> excludeMap = settings.getByPrefix(\"cluster.routing.allocation.exclude.\").getAsMap();\n if (excludeMap.isEmpty()) {\n clusterExcludeFilters = null;\n } else {\n- clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(excludeMap);\n+ clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap);\n }\n nodeSettingsService.addListener(new ApplySettings());\n }\n@@ -77,6 +88,11 @@ public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAl\n }\n \n private boolean shouldFilter(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {\n+ if (clusterRequireFilters != null) {\n+ if (!clusterRequireFilters.match(node.node())) {\n+ return true;\n+ }\n+ }\n if (clusterIncludeFilters != null) {\n if (!clusterIncludeFilters.match(node.node())) {\n return true;\n@@ -89,6 +105,11 @@ private boolean shouldFilter(ShardRouting shardRouting, RoutingNode node, Routin\n }\n \n IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index());\n+ if (indexMd.requireFilters() != null) {\n+ if (!indexMd.requireFilters().match(node.node())) {\n+ return true;\n+ }\n+ }\n if (indexMd.includeFilters() != null) {\n if (!indexMd.includeFilters().match(node.node())) {\n return true;\n@@ -106,13 +127,17 @@ private boolean shouldFilter(ShardRouting shardRouting, RoutingNode node, Routin\n class ApplySettings implements NodeSettingsService.Listener {\n @Override\n public void onRefreshSettings(Settings settings) {\n+ ImmutableMap<String, String> requireMap = settings.getByPrefix(\"cluster.routing.allocation.require.\").getAsMap();\n+ if (!requireMap.isEmpty()) {\n+ clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap);\n+ }\n ImmutableMap<String, String> includeMap = settings.getByPrefix(\"cluster.routing.allocation.include.\").getAsMap();\n if (!includeMap.isEmpty()) {\n- clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(includeMap);\n+ clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap);\n }\n ImmutableMap<String, String> excludeMap = settings.getByPrefix(\"cluster.routing.allocation.exclude.\").getAsMap();\n if (!excludeMap.isEmpty()) {\n- 
clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(excludeMap);\n+ clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap);\n }\n }\n }", "filename": "src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java", "status": "modified" }, { "diff": "@@ -93,4 +93,66 @@ public void testDecommissionNodeNoReplicas() throws Exception {\n client(\"node1\").admin().indices().prepareRefresh().execute().actionGet();\n assertThat(client(\"node1\").prepareCount().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().count(), equalTo(100l));\n }\n+\n+ @Test\n+ public void testDisablingAllocationFiltering() throws Exception {\n+ logger.info(\"--> starting 2 nodes\");\n+ startNode(\"node1\");\n+ startNode(\"node2\");\n+\n+ logger.info(\"--> creating an index with no replicas\");\n+ client(\"node1\").admin().indices().prepareCreate(\"test\")\n+ .setSettings(settingsBuilder().put(\"index.number_of_replicas\", 0))\n+ .execute().actionGet();\n+\n+ ClusterHealthResponse clusterHealthResponse = client(\"node1\").admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();\n+ assertThat(clusterHealthResponse.timedOut(), equalTo(false));\n+\n+ logger.info(\"--> index some data\");\n+ for (int i = 0; i < 100; i++) {\n+ client(\"node1\").prepareIndex(\"test\", \"type\", Integer.toString(i)).setSource(\"field\", \"value\" + i).execute().actionGet();\n+ }\n+ client(\"node1\").admin().indices().prepareRefresh().execute().actionGet();\n+ assertThat(client(\"node1\").prepareCount().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().count(), equalTo(100l));\n+\n+ logger.info(\"--> remove index from the first node\");\n+ client(\"node1\").admin().indices().prepareUpdateSettings(\"test\")\n+ .setSettings(settingsBuilder().put(\"index.routing.allocation.exclude._name\", \"node1\"))\n+ .execute().actionGet();\n+\n+ Thread.sleep(200);\n+\n+ clusterHealthResponse = client(\"node1\").admin().cluster().prepareHealth()\n+ .setWaitForGreenStatus()\n+ .setWaitForRelocatingShards(0)\n+ .execute().actionGet();\n+ assertThat(clusterHealthResponse.timedOut(), equalTo(false));\n+\n+ logger.info(\"--> verify all shards are allocated on node2 now\");\n+ ClusterState clusterState = client(\"node1\").admin().cluster().prepareState().execute().actionGet().state();\n+ IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(\"test\");\n+ for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {\n+ for (ShardRouting shardRouting : indexShardRoutingTable) {\n+ assertThat(clusterState.nodes().get(shardRouting.currentNodeId()).name(), equalTo(\"node2\"));\n+ }\n+ }\n+\n+ logger.info(\"--> disable allocation filtering \");\n+ client(\"node1\").admin().indices().prepareUpdateSettings(\"test\")\n+ .setSettings(settingsBuilder().put(\"index.routing.allocation.exclude._name\", \"\"))\n+ .execute().actionGet();\n+\n+ Thread.sleep(200);\n+\n+ clusterHealthResponse = client(\"node1\").admin().cluster().prepareHealth()\n+ .setWaitForGreenStatus()\n+ .setWaitForRelocatingShards(0)\n+ .execute().actionGet();\n+ assertThat(clusterHealthResponse.timedOut(), equalTo(false));\n+\n+ logger.info(\"--> verify that there are shards allocated on both nodes now\");\n+ clusterState = client(\"node1\").admin().cluster().prepareState().execute().actionGet().state();\n+ assertThat(clusterState.routingTable().index(\"test\").numberOfNodesShardsAreAllocatedOn(), equalTo(2));\n+ }\n }\n+", "filename": 
"src/test/java/org/elasticsearch/test/integration/cluster/allocation/FilteringAllocationTests.java", "status": "modified" }, { "diff": "@@ -29,6 +29,7 @@\n \n import static org.hamcrest.MatcherAssert.assertThat;\n import static org.hamcrest.Matchers.equalTo;\n+import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.*;\n \n /**\n */\n@@ -40,7 +41,7 @@ public void nameMatch() {\n Settings settings = ImmutableSettings.settingsBuilder()\n .put(\"xxx.name\", \"name1\")\n .build();\n- DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(\"xxx.\", settings);\n+ DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, \"xxx.\", settings);\n \n DiscoveryNode node = new DiscoveryNode(\"name1\", \"id1\", DummyTransportAddress.INSTANCE, ImmutableMap.<String, String>of());\n assertThat(filters.match(node), equalTo(true));\n@@ -54,7 +55,7 @@ public void idMatch() {\n Settings settings = ImmutableSettings.settingsBuilder()\n .put(\"xxx._id\", \"id1\")\n .build();\n- DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(\"xxx.\", settings);\n+ DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, \"xxx.\", settings);\n \n DiscoveryNode node = new DiscoveryNode(\"name1\", \"id1\", DummyTransportAddress.INSTANCE, ImmutableMap.<String, String>of());\n assertThat(filters.match(node), equalTo(true));\n@@ -64,11 +65,53 @@ public void idMatch() {\n }\n \n @Test\n- public void emptyString() {\n+ public void idOrNameMatch() {\n Settings settings = ImmutableSettings.settingsBuilder()\n- .put(\"xxx.name\", \"\")\n+ .put(\"xxx._id\", \"id1,blah\")\n+ .put(\"xxx.name\", \"blah,name2\")\n .build();\n- DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(\"xxx.\", settings);\n+ DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, \"xxx.\", settings);\n+\n+ DiscoveryNode node = new DiscoveryNode(\"name1\", \"id1\", DummyTransportAddress.INSTANCE, ImmutableMap.<String, String>of());\n+ assertThat(filters.match(node), equalTo(true));\n+\n+ node = new DiscoveryNode(\"name2\", \"id2\", DummyTransportAddress.INSTANCE, ImmutableMap.<String, String>of());\n+ assertThat(filters.match(node), equalTo(true));\n+\n+ node = new DiscoveryNode(\"name3\", \"id3\", DummyTransportAddress.INSTANCE, ImmutableMap.<String, String>of());\n+ assertThat(filters.match(node), equalTo(false));\n+ }\n+\n+ @Test\n+ public void tagAndGroupMatch() {\n+ Settings settings = ImmutableSettings.settingsBuilder()\n+ .put(\"xxx.tag\", \"A\")\n+ .put(\"xxx.group\", \"B\")\n+ .build();\n+ DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, \"xxx.\", settings);\n+\n+ DiscoveryNode node = new DiscoveryNode(\"name1\", \"id1\", DummyTransportAddress.INSTANCE,\n+ ImmutableMap.<String, String>of(\"tag\", \"A\", \"group\", \"B\"));\n+ assertThat(filters.match(node), equalTo(true));\n+\n+ node = new DiscoveryNode(\"name2\", \"id2\", DummyTransportAddress.INSTANCE,\n+ ImmutableMap.<String, String>of(\"tag\", \"A\", \"group\", \"B\", \"name\", \"X\"));\n+ assertThat(filters.match(node), equalTo(true));\n+\n+ node = new DiscoveryNode(\"name3\", \"id3\", DummyTransportAddress.INSTANCE,\n+ ImmutableMap.<String, String>of(\"tag\", \"A\", \"group\", \"F\", \"name\", \"X\"));\n+ assertThat(filters.match(node), equalTo(false));\n+\n+ node = new DiscoveryNode(\"name4\", \"id4\", DummyTransportAddress.INSTANCE, ImmutableMap.<String, String>of());\n+ assertThat(filters.match(node), equalTo(false));\n+ }\n+\n+ @Test\n+ public void 
starMatch() {\n+ Settings settings = ImmutableSettings.settingsBuilder()\n+ .put(\"xxx.name\", \"*\")\n+ .build();\n+ DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, \"xxx.\", settings);\n \n DiscoveryNode node = new DiscoveryNode(\"name1\", \"id1\", DummyTransportAddress.INSTANCE, ImmutableMap.<String, String>of());\n assertThat(filters.match(node), equalTo(true));", "filename": "src/test/java/org/elasticsearch/test/unit/cluster/node/DiscoveryNodeFiltersTests.java", "status": "modified" } ] }
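Stripped of the per-attribute matching, the decision rule described in the issue body is short: require filters use AND semantics and must all match, include filters use OR semantics and need at least one match when set, and exclude filters use OR semantics and must not match. A standalone sketch of that rule, using only JDK classes; the predicate-based node representation is illustrative, not the DiscoveryNodeFilters API:

```java
import java.util.Map;
import java.util.function.Predicate;

// Sketch of the require / include / exclude decision rule.
public class FilterAllocationSketch {

    static boolean canAllocate(Map<String, String> nodeAttributes,
                               Predicate<Map<String, String>> requireFilters,   // AND semantics, may be null
                               Predicate<Map<String, String>> includeFilters,   // OR semantics, may be null
                               Predicate<Map<String, String>> excludeFilters) { // OR semantics, may be null
        if (requireFilters != null && !requireFilters.test(nodeAttributes)) return false;
        if (includeFilters != null && !includeFilters.test(nodeAttributes)) return false;
        if (excludeFilters != null && excludeFilters.test(nodeAttributes)) return false;
        return true;
    }

    public static void main(String[] args) {
        Map<String, String> node = Map.of("tag", "A", "group", "B");
        Predicate<Map<String, String>> require =
                attrs -> "A".equals(attrs.get("tag")) && "B".equals(attrs.get("group"));
        Predicate<Map<String, String>> exclude = attrs -> "X".equals(attrs.get("name"));
        System.out.println(canAllocate(node, require, null, exclude));          // true
        System.out.println(canAllocate(node, require, attrs -> false, exclude)); // false: include set, no match
    }
}
```

Treating an absent filter group as "no constraint" (the null checks) is also why an empty exclude setting now drops the constraint entirely, the behavior change called out in the issue.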
{ "body": "Issue #1056 reports the same problem as an \"ambiguity\" but this is an actual defect.\n\npatch_match patterns like \"tags._\" and \"_.tags.*\" are incorrectly matched and yield bad mappings when indexing documents. (A fix/pull request is on the way.)\n", "comments": [ { "body": "Pushed the fix.\n", "created_at": "2012-11-01T21:26:42Z" } ], "number": 2371, "title": "path_match support in dynamic templates is incorrect" }
{ "body": "This resolves Issue #2371 (_and_ Issue #1056).\n", "number": 2372, "review_comments": [], "title": "Fixed issue2371 (incorrect behavior of path_match)." }
{ "commits": [ { "message": "fixed issue2371 (incorrect behavior of path_match)" } ], "files": [ { "diff": "@@ -520,16 +520,16 @@ private void serializeObject(final ParseContext context, String currentFieldName\n objectMapper = mappers.get(currentFieldName);\n if (objectMapper == null) {\n newMapper = true;\n+ // remove the current field name from path, since template search and the object builder add it as well...\n+ context.path().remove();\n Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, \"object\");\n if (builder == null) {\n builder = MapperBuilders.object(currentFieldName).enabled(true).dynamic(dynamic).pathType(pathType);\n }\n- // remove the current field name from path, since the object builder adds it as well...\n- context.path().remove();\n BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());\n objectMapper = builder.build(builderContext);\n putMapper(objectMapper);\n- // now re add it\n+ // ...now re add it\n context.path().add(currentFieldName);\n context.setMappingsModified();\n }", "filename": "src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java", "status": "modified" }, { "diff": "@@ -6,5 +6,10 @@\n \"obj2\":{\n \"name\":\"obj2_level\"\n }\n+ },\n+ \"obj3\":{\n+ \"obj4\":{\n+ \"prop1\":\"prop1_value\"\n+ }\n }\n }\n\\ No newline at end of file", "filename": "src/test/java/org/elasticsearch/test/unit/index/mapper/dynamictemplate/pathmatch/test-data.json", "status": "modified" }, { "diff": "@@ -16,6 +16,14 @@\n \"store\":\"yes\"\n }\n }\n+ },\n+ {\n+ \"template_3\":{\n+ \"path_match\":\"*.obj4.*\",\n+ \"mapping\":{\n+ \"type\":\"string\"\n+ }\n+ }\n }\n ]\n }", "filename": "src/test/java/org/elasticsearch/test/unit/index/mapper/dynamictemplate/pathmatch/test-mapping.json", "status": "modified" } ] }
{ "body": "```\ncurl -XDELETE 'http://127.0.0.1:9200/foo/bar/_warmer/1?pretty=1' \n\n[2012-10-27 13:03:26,392][WARN ][cluster.service ] [Oddball] failed to execute cluster state update, state:\nversion [7], source [delete_warmer [1]]\nnodes: \n [Oddball][CNyPGneKSIGNtS7MkcewyQ][inet[/192.168.5.20:9300]], local, master\nrouting_table:\n-- index [test]\n----shard_id [test][0]\n--------[test][0], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][0], node[null], [R], s[UNASSIGNED]\n----shard_id [test][1]\n--------[test][1], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][1], node[null], [R], s[UNASSIGNED]\n----shard_id [test][2]\n--------[test][2], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][2], node[null], [R], s[UNASSIGNED]\n----shard_id [test][3]\n--------[test][3], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][3], node[null], [R], s[UNASSIGNED]\n----shard_id [test][4]\n--------[test][4], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][4], node[null], [R], s[UNASSIGNED]\n\n-- index [foo]\n----shard_id [foo][0]\n--------[foo][0], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][0], node[null], [R], s[UNASSIGNED]\n----shard_id [foo][1]\n--------[foo][1], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][1], node[null], [R], s[UNASSIGNED]\n----shard_id [foo][2]\n--------[foo][2], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][2], node[null], [R], s[UNASSIGNED]\n----shard_id [foo][3]\n--------[foo][3], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][3], node[null], [R], s[UNASSIGNED]\n----shard_id [foo][4]\n--------[foo][4], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][4], node[null], [R], s[UNASSIGNED]\n\nrouting_nodes:\n-----node_id[CNyPGneKSIGNtS7MkcewyQ][V]\n--------[test][0], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][1], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][2], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][3], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[test][4], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][0], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][1], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][2], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][3], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n--------[foo][4], node[CNyPGneKSIGNtS7MkcewyQ], [P], s[STARTED]\n---- unassigned\n--------[test][0], node[null], [R], s[UNASSIGNED]\n--------[test][1], node[null], [R], s[UNASSIGNED]\n--------[test][2], node[null], [R], s[UNASSIGNED]\n--------[test][3], node[null], [R], s[UNASSIGNED]\n--------[test][4], node[null], [R], s[UNASSIGNED]\n--------[foo][0], node[null], [R], s[UNASSIGNED]\n--------[foo][1], node[null], [R], s[UNASSIGNED]\n--------[foo][2], node[null], [R], s[UNASSIGNED]\n--------[foo][3], node[null], [R], s[UNASSIGNED]\n--------[foo][4], node[null], [R], s[UNASSIGNED]\n\norg.elasticsearch.search.warmer.IndexWarmerMissingException: index_warmer [1] missing\n at org.elasticsearch.action.admin.indices.warmer.delete.TransportDeleteWarmerAction$1.execute(TransportDeleteWarmerAction.java:130)\n at org.elasticsearch.cluster.service.InternalClusterService$2.run(InternalClusterService.java:223)\n at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1110)\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:603)\n at java.lang.Thread.run(Thread.java:679)\n```\n", 
"comments": [ { "body": "The issue occurs because TransportDeleteWarmerAction [throws an exception](/elasticsearch/elasticsearch/blob/master/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java#L130) inside submitStateUpdateTask, and as a result [the count down latch](/elasticsearch/elasticsearch/blob/master/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java#L155) in ProcessedClusterStateUpdateTask.clusterStateProcessed is never triggered and the REST request waits forever for a response. We could solve this issue by surrounding the body of the execute method in TransportDeleteWarmerAction with try catch block and releasing the latch if an exception is thrown. However, I think it would be more useful to add the clusterStateProcessingFailed method to the ProcessedClusterStateUpdateTask, that would be triggered in the event of cluster state update failure. Because failure can occur after the ClusterStateUpdateTask.execute method returns, (for example if one of the cluster state listeners throws an exception), it's still possible that the clusterStateProcessed would be never called even if ClusterStateUpdateTask.execute was successful. By adding clusterStateProcessingFailed method we would guarantee that the submitStateUpdateTask caller would get a response if update was successful (clusterStateProcessed) as well as if it failed (clusterStateProcessingFailed). \n", "created_at": "2012-10-29T02:16:41Z" } ], "number": 2363, "title": "Deleting a non-existent warmer causes ES to hang" }
{ "body": "Fixes #2363\n", "number": 2365, "review_comments": [], "title": "Deleting a non-existent warmer shouldn't cause request to hang" }
{ "commits": [ { "message": "Deleting a non-existent warmer shouldn't cause request to hang\n\nFixes #2363" } ], "files": [ { "diff": "@@ -94,60 +94,66 @@ protected DeleteWarmerResponse masterOperation(final DeleteWarmerRequest request\n clusterService.submitStateUpdateTask(\"delete_warmer [\" + request.name() + \"]\", new ProcessedClusterStateUpdateTask() {\n @Override\n public ClusterState execute(ClusterState currentState) {\n- MetaData.Builder mdBuilder = MetaData.builder().metaData(currentState.metaData());\n+ try {\n+ MetaData.Builder mdBuilder = MetaData.builder().metaData(currentState.metaData());\n \n- boolean globalFoundAtLeastOne = false;\n- for (String index : request.indices()) {\n- IndexMetaData indexMetaData = currentState.metaData().index(index);\n- if (indexMetaData == null) {\n- throw new IndexMissingException(new Index(index));\n- }\n- IndexWarmersMetaData warmers = indexMetaData.custom(IndexWarmersMetaData.TYPE);\n- if (warmers != null) {\n- List<IndexWarmersMetaData.Entry> entries = Lists.newArrayList();\n- for (IndexWarmersMetaData.Entry entry : warmers.entries()) {\n- if (request.name() == null || Regex.simpleMatch(request.name(), entry.name())) {\n- globalFoundAtLeastOne = true;\n- // don't add it...\n- } else {\n- entries.add(entry);\n- }\n- }\n- // a change, update it...\n- if (entries.size() != warmers.entries().size()) {\n- warmers = new IndexWarmersMetaData(entries.toArray(new IndexWarmersMetaData.Entry[entries.size()]));\n- IndexMetaData.Builder indexBuilder = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).putCustom(IndexWarmersMetaData.TYPE, warmers);\n- mdBuilder.put(indexBuilder);\n- }\n- }\n- }\n-\n- if (!globalFoundAtLeastOne) {\n- if (request.name() == null) {\n- // full match, just return with no failure\n- return currentState;\n- }\n- throw new IndexWarmerMissingException(request.name());\n- }\n-\n- if (logger.isInfoEnabled()) {\n+ boolean globalFoundAtLeastOne = false;\n for (String index : request.indices()) {\n IndexMetaData indexMetaData = currentState.metaData().index(index);\n if (indexMetaData == null) {\n throw new IndexMissingException(new Index(index));\n }\n IndexWarmersMetaData warmers = indexMetaData.custom(IndexWarmersMetaData.TYPE);\n if (warmers != null) {\n+ List<IndexWarmersMetaData.Entry> entries = Lists.newArrayList();\n for (IndexWarmersMetaData.Entry entry : warmers.entries()) {\n- if (Regex.simpleMatch(request.name(), entry.name())) {\n- logger.info(\"[{}] delete warmer [{}]\", index, entry.name());\n+ if (request.name() == null || Regex.simpleMatch(request.name(), entry.name())) {\n+ globalFoundAtLeastOne = true;\n+ // don't add it...\n+ } else {\n+ entries.add(entry);\n+ }\n+ }\n+ // a change, update it...\n+ if (entries.size() != warmers.entries().size()) {\n+ warmers = new IndexWarmersMetaData(entries.toArray(new IndexWarmersMetaData.Entry[entries.size()]));\n+ IndexMetaData.Builder indexBuilder = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).putCustom(IndexWarmersMetaData.TYPE, warmers);\n+ mdBuilder.put(indexBuilder);\n+ }\n+ }\n+ }\n+\n+ if (!globalFoundAtLeastOne) {\n+ if (request.name() == null) {\n+ // full match, just return with no failure\n+ return currentState;\n+ }\n+ throw new IndexWarmerMissingException(request.name());\n+ }\n+\n+ if (logger.isInfoEnabled()) {\n+ for (String index : request.indices()) {\n+ IndexMetaData indexMetaData = currentState.metaData().index(index);\n+ if (indexMetaData == null) {\n+ throw new IndexMissingException(new Index(index));\n+ }\n+ IndexWarmersMetaData warmers = 
indexMetaData.custom(IndexWarmersMetaData.TYPE);\n+ if (warmers != null) {\n+ for (IndexWarmersMetaData.Entry entry : warmers.entries()) {\n+ if (Regex.simpleMatch(request.name(), entry.name())) {\n+ logger.info(\"[{}] delete warmer [{}]\", index, entry.name());\n+ }\n }\n }\n }\n }\n- }\n \n- return ClusterState.builder().state(currentState).metaData(mdBuilder).build();\n+ return ClusterState.builder().state(currentState).metaData(mdBuilder).build();\n+ } catch (Exception ex) {\n+ failureRef.set(ex);\n+ latch.countDown();\n+ return currentState;\n+ }\n }\n \n @Override", "filename": "src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java", "status": "modified" }, { "diff": "@@ -23,6 +23,7 @@\n import org.elasticsearch.cluster.ClusterState;\n import org.elasticsearch.common.settings.ImmutableSettings;\n import org.elasticsearch.index.query.QueryBuilders;\n+import org.elasticsearch.search.warmer.IndexWarmerMissingException;\n import org.elasticsearch.search.warmer.IndexWarmersMetaData;\n import org.elasticsearch.test.integration.AbstractNodesTests;\n import org.hamcrest.Matchers;\n@@ -142,4 +143,20 @@ public void createIndexWarmer() {\n client.prepareIndex(\"test\", \"type1\", \"1\").setSource(\"field\", \"value1\").setRefresh(true).execute().actionGet();\n client.prepareIndex(\"test\", \"type1\", \"2\").setSource(\"field\", \"value2\").setRefresh(true).execute().actionGet();\n }\n+\n+ @Test\n+ public void deleteNonExistentIndexWarmerTest() {\n+ client.admin().indices().prepareDelete().execute().actionGet();\n+\n+ client.admin().indices().prepareCreate(\"test\").execute().actionGet();\n+\n+ try {\n+ client.admin().indices().prepareDeleteWarmer().setIndices(\"test\").setName(\"foo\").execute().actionGet(1000);\n+ assert false : \"warmer foo should not exist\";\n+ }\n+ catch(IndexWarmerMissingException ex) {\n+ assertThat(ex.name(), equalTo(\"foo\"));\n+ }\n+ }\n+\n }", "filename": "src/test/java/org/elasticsearch/test/integration/indices/wamer/SimpleIndicesWarmerTests.java", "status": "modified" } ] }
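The shape of the fix is a general one: when a state-update callback can throw, the failure has to be captured and the latch the caller is waiting on has to be released, otherwise the request hangs exactly as reported. A minimal self-contained sketch of that pattern using plain JDK concurrency primitives; the task and latch names are illustrative, not the Elasticsearch cluster-service API:

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

// Sketch: capture the failure and release the waiting caller instead of hanging.
public class ReleaseLatchOnFailure {

    static <T> T runUpdate(Supplier<T> update, long timeoutMillis) throws Exception {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<T> result = new AtomicReference<>();
        AtomicReference<Throwable> failure = new AtomicReference<>();

        Thread updater = new Thread(() -> {
            try {
                result.set(update.get());
            } catch (Throwable t) {
                failure.set(t);        // without this branch the caller waits forever
            } finally {
                latch.countDown();
            }
        });
        updater.start();

        if (!latch.await(timeoutMillis, TimeUnit.MILLISECONDS)) {
            throw new IllegalStateException("timed out waiting for cluster state update");
        }
        if (failure.get() != null) {
            throw new RuntimeException(failure.get());
        }
        return result.get();
    }

    public static void main(String[] args) throws Exception {
        try {
            runUpdate(() -> { throw new IllegalArgumentException("warmer [foo] missing"); }, 1000);
        } catch (RuntimeException e) {
            System.out.println("request failed instead of hanging: " + e.getCause().getMessage());
        }
    }
}
```

The added test exercises exactly this path: deleting a warmer that does not exist must come back promptly as an IndexWarmerMissingException rather than timing out.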
{ "body": "This issue was introduced in v0.19.9.\n\nSteps to reproduce:\n1. create test index\n\n```\ncurl -XPUT localhost:9200/test\n```\n1. update mapping\n\n```\ncurl -XPUT 'http://localhost:9200/test/type1/_mapping' -d '\n{\n \"type1\" : {\n \"_size\" : {\n \"enabled\" : true,\n \"store\" : \"yes\"\n }\n }\n}'\n```\n1. register percolator query:\n\n```\ncurl -XPUT localhost:9200/_percolator/test/kuku -d '{\n \"query\" : {\n \"term\" : {\n \"field1\" : \"value1\"\n }\n }\n}'\n```\n1. percolate a document:\n\n```\ncurl -XGET localhost:9200/test/type1/_percolate -d '{\n \"doc\" : {\n \"field1\" : \"value1\"\n }\n}'\n```\n\nThis leads to the following error:\n\n```\n2012-09-26 09:39:21,370][DEBUG][action.percolate ] [Thor Girl] failed to execute [org.elasticsearch.action.percolate.PercolateRequest@e1935d6]\norg.elasticsearch.index.mapper.MapperParsingException: Failed to parse [_size]\n at org.elasticsearch.index.mapper.core.AbstractFieldMapper.parse(AbstractFieldMapper.java:327)\n at org.elasticsearch.index.mapper.internal.SizeFieldMapper.postParse(SizeFieldMapper.java:122)\n at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:502)\n at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:438)\n at org.elasticsearch.index.percolator.PercolatorExecutor.percolate(PercolatorExecutor.java:257)\n at org.elasticsearch.index.percolator.PercolatorService.percolate(PercolatorService.java:111)\n at org.elasticsearch.action.percolate.TransportPercolateAction.shardOperation(TransportPercolateAction.java:93)\n at org.elasticsearch.action.percolate.TransportPercolateAction.shardOperation(TransportPercolateAction.java:41)\n at org.elasticsearch.action.support.single.custom.TransportSingleCustomOperationAction$AsyncSingleAction$2.run(TransportSingleCustomOperationAction.java:176)\n at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)\n at java.lang.Thread.run(Thread.java:680)\nCaused by: java.lang.NullPointerException\n at org.elasticsearch.index.mapper.internal.SizeFieldMapper.innerParseCreateField(SizeFieldMapper.java:140)\n at org.elasticsearch.index.mapper.core.NumberFieldMapper.parseCreateField(NumberFieldMapper.java:171)\n at org.elasticsearch.index.mapper.core.AbstractFieldMapper.parse(AbstractFieldMapper.java:314)\n ... 11 more\n{\"error\":\"MapperParsingException[Failed to parse [_size]]; nested: \",\"status\":400}\n```\n", "comments": [ { "body": "Pushed.\n", "created_at": "2012-10-23T21:12:03Z" } ], "number": 2352, "title": "MapperParsingException on percolation with _size enabled" }
{ "body": "Fixes #2352\n", "number": 2353, "review_comments": [], "title": "Percolation shouldn't fail when the _size field is enabled" }
{ "commits": [ { "message": "Percolation shouldn't fail when the _size field is enabled" }, { "message": "Add test for percolation with the _size field enabled" } ], "files": [ { "diff": "@@ -137,6 +137,9 @@ protected Fieldable innerParseCreateField(ParseContext context) throws IOExcepti\n if (!enabled) {\n return null;\n }\n+ if (context.flyweight()) {\n+ return null;\n+ }\n return new CustomIntegerNumericField(this, context.source().length());\n }\n ", "filename": "src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java", "status": "modified" }, { "diff": "@@ -25,6 +25,7 @@\n import org.elasticsearch.action.index.IndexResponse;\n import org.elasticsearch.action.percolate.PercolateResponse;\n import org.elasticsearch.client.Client;\n+import org.elasticsearch.common.xcontent.XContentFactory;\n import org.elasticsearch.test.integration.AbstractNodesTests;\n import org.testng.annotations.AfterClass;\n import org.testng.annotations.BeforeClass;\n@@ -386,4 +387,47 @@ public void dynamicAddingRemovingQueries() throws Exception {\n .execute().actionGet();\n assertThat(percolate.matches().size(), equalTo(0));\n }\n+\n+ @Test\n+ public void percolateWithSizeField() throws Exception {\n+ try {\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+ try {\n+ client.admin().indices().prepareDelete(\"_percolator\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+ String mapping = XContentFactory.jsonBuilder().startObject().startObject(\"type1\")\n+ .startObject(\"_size\").field(\"enabled\", true).field(\"stored\", \"yes\").endObject()\n+ .endObject().endObject().string();\n+\n+ client.admin().indices().prepareCreate(\"test\")\n+ .setSettings(settingsBuilder().put(\"index.number_of_shards\", 2))\n+ .addMapping(\"type1\",mapping)\n+ .execute().actionGet();\n+ client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();\n+\n+ logger.info(\"--> register a query\");\n+ client.prepareIndex(\"_percolator\", \"test\", \"kuku\")\n+ .setSource(jsonBuilder().startObject()\n+ .field(\"query\", termQuery(\"field1\", \"value1\"))\n+ .endObject())\n+ .setRefresh(true)\n+ .execute().actionGet();\n+ client.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(4).execute().actionGet();\n+\n+ logger.info(\"--> percolate a document\");\n+ PercolateResponse percolate = client.preparePercolate(\"test\", \"type1\").setSource(jsonBuilder().startObject()\n+ .startObject(\"doc\").startObject(\"type1\")\n+ .field(\"field1\", \"value1\")\n+ .endObject().endObject()\n+ .endObject())\n+ .execute().actionGet();\n+ assertThat(percolate.matches().size(), equalTo(1));\n+ assertThat(percolate.matches(), hasItem(\"kuku\"));\n+ }\n+\n }", "filename": "src/test/java/org/elasticsearch/test/integration/percolator/SimplePercolatorTests.java", "status": "modified" } ] }
{ "body": "If a record contains a phrase with stop words in the middle (`foo and bar`, for example) and the search expression contains a matching phrase, highlighter produces different results that depend on how the field was indexed. \n- If the field was indexed with `\"term_vector\": \"with_positions_offsets\"` elasticsearch doesn't highlight anything\n- if the field was indexed with `\"term_vector\": \"with_positions_offsets\"` and the field's analyzer has `\"enable_position_increments\": false`, elasticsearch highlights the entire phrase: `foo and bar`. \n- if the field was indexed without \"term_vector\" enabled elasticsearch highlights words `foo` and `bar` separately.\n\nFull repro can be found here: https://gist.github.com/3280061\n\nSolr seems to be behaving the same way: see https://gist.github.com/3279879#file_response_fvh.txt and [SOLR-3724](https://issues.apache.org/jira/browse/SOLR-3724)\n\nA possible workaround for us would be to add a flag on the highlighter query and globally that would disable fast vector highlighter. FVH is relatively new portion of lucene and this is already the second time when I wish I had a way to temporarily disable it without reindexing my data. (The first time was [LUCENE-3719](https://issues.apache.org/jira/browse/LUCENE-3719).) I could submit a pull request if you think that would be a useful feature.\n", "comments": [ { "body": "Yea, I think adding a \"type\" for highlighting will make sense, as long as we throw a proper failure when trying to use \"fvh\" and no term vectors exists.\n", "created_at": "2012-08-09T18:51:52Z" }, { "body": "Weirdly, even though it is no longer possible to disable position increments in the stop filter, the FVH still doesn't highlight phrases with stopwords.\n", "created_at": "2014-12-24T15:36:00Z" }, { "body": "Any chance there has been any update on this?? The default \"english\" analyzer exposed this issue so easily. \n", "created_at": "2015-09-02T21:31:03Z" }, { "body": "+1 I need to use the fvh filter to combine the highlights from searching two fields (one using the English analyzer and one using whitespace, to capture exact matches). However, highlighting fails for phrases with stopwords.\n", "created_at": "2015-09-13T07:30:43Z" }, { "body": "Give the [experimental highlighter](https://github.com/wikimedia/search-highlighter) a shot. It works reasonably well as a replacement for the fvh and _should_ support this though I don't remember testing it explicitly. It [looks](https://en.wikipedia.org/w/index.php?search=to+be+or+not+to+be&title=Special%3ASearch&go=Go&fulltext=1) like this works, though it might be working because of the more whitespace like analyzer in use over there in addition to the english one.\n\nI believe I'll be working a ton more on highlighters in the coming months and I'll make sure this is on the list of things we fix.\n", "created_at": "2015-09-13T11:36:33Z" }, { "body": "Thank you, @nik9000! I tried the experimental highlighter and it had the same behavior as fvh (ignoring stopwords in phrase searches, and therefore missing matches where the stopword is in the middle-- e.g. \"motion to adjourn\"). I also noticed that the experimental highlighter doesn't support whole-field fragments (number_of_fragments = 0).\n**However**, your comment solved my problem. Indeed, it looks like the Wikipedia search matches phrases because it uses a whitespace-like analyzer. So, I took out the stopwords from my English analyzer, reindexed, and now I've got phrase highlighting with stopwords. 
Would still be nice to have the option to match stopwords with the highlighter only, but this works for now.\n", "created_at": "2015-09-14T06:25:09Z" }, { "body": "Updated for 5.0. FVH still doesn't handle phrase queries with stop words unless slop is included:\n\n```\nPUT /test-idx\n{\n \"settings\": {\n \"index\": {\n \"analysis\": {\n \"analyzer\": {\n \"standard_stop\": {\n \"type\": \"standard\",\n \"stopwords\": \"_english_\"\n }\n }\n }\n }\n },\n \"mappings\": {\n \"rec\": {\n \"properties\": {\n \"message\": {\n \"type\": \"text\",\n \"analyzer\": \"standard\",\n \"term_vector\": \"with_positions_offsets\",\n \"fields\": {\n \"stop\": {\n \"type\": \"text\",\n \"analyzer\": \"standard_stop\",\n \"term_vector\": \"with_positions_offsets\"\n }\n }\n }\n }\n }\n }\n}\n\nPUT /test-idx/rec/1\n{\n \"message\": \"foo and bar and something else\"\n}\n\nGET /test-idx/_search\n{\n \"highlight\": {\n \"fields\": {\n \"message\": {},\n \"message.stop\": {}\n }\n },\n \"query\": {\n \"query_string\": {\n \"fields\": [\"message\",\"message.stop\"],\n \"query\": \"\\\"foo and bar\\\"\"\n }\n }\n}\n\n```\n\nFVH works when slop is enabled:\n\n```\nGET /test-idx/_search\n{\n \"highlight\": {\n \"fields\": {\n \"message\": {},\n \"message.stop\": {}\n }\n },\n \"query\": {\n \"query_string\": {\n \"fields\": [\"message\",\"message.stop\"],\n \"query\": \"\\\"foo and bar\\\"~4\"\n }\n }\n}\n```\n", "created_at": "2016-11-06T09:22:24Z" }, { "body": "Is there any solution for this?. highlighting is not working when I index with English Analyzer, and search for a phrase with stop words.\n", "created_at": "2016-11-10T13:12:57Z" }, { "body": "@jimczi The FVH doesn't handle phrase matches when slop is involved - is this a bug in our implementation or in Lucene?\n", "created_at": "2016-11-10T13:45:47Z" }, { "body": "How can I make the stop words highlighted? (using English Analyzer and With_Positions_Offsets) in phrase matches query?\n", "created_at": "2016-11-10T14:15:52Z" }, { "body": "The problem is in Lucene:\nhttps://issues.apache.org/jira/browse/LUCENE-7551\nThe FVH does not handle gaps in phrase query. I'll work on this.\n", "created_at": "2016-11-10T16:33:08Z" }, { "body": "@jimczi apparently you opened the JIRA issue twice :)\nDo you think this is related to https://issues.apache.org/jira/browse/LUCENE-7541 as well?\n", "created_at": "2016-11-10T18:07:00Z" }, { "body": "Thanks @dadoonet, this is not related. This one is about gaps in phrase query and the other one is about repeated tokens in the query. The multi tagging system of the FVH is at the token level and it will not be easy to fix that.\n", "created_at": "2016-11-10T20:32:07Z" }, { "body": "Closing in favour of #21621", "created_at": "2016-11-24T18:13:23Z" } ], "number": 2157, "title": "No highlighting for phrases with stop words when term vector with positions offsets is enabled" }
{ "body": "Workaround for #2157\n", "number": 2350, "review_comments": [], "title": "Add highlighter type switch" }
{ "commits": [ { "message": "Add highlighter type switch" } ], "files": [ { "diff": "@@ -646,6 +646,14 @@ public SearchRequestBuilder setHighlighterRequireFieldMatch(boolean requireField\n return this;\n }\n \n+ /**\n+ * The highlighter type to use.\n+ */\n+ public SearchRequestBuilder setHighlighterType(String type) {\n+ highlightBuilder().highlighterType(type);\n+ return this;\n+ }\n+\n /**\n * Sets the source of the request as a json string. Note, settings anything other\n * than the search type will cause this source to be overridden, consider using", "filename": "src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java", "status": "modified" }, { "diff": "@@ -48,6 +48,9 @@ public class HighlightBuilder implements ToXContent {\n \n private Boolean requireFieldMatch;\n \n+ private String highlighterType;\n+\n+\n /**\n * Adds a field to be highlighted with default fragment size of 100 characters, and\n * default number of fragments of 5 using the default encoder\n@@ -176,6 +179,15 @@ public HighlightBuilder requireFieldMatch(boolean requireFieldMatch) {\n return this;\n }\n \n+ /**\n+ * Set type of highlighter to use. Supported types\n+ * are <tt>highlighter</tt> and <tt>fast-vector-highlighter</tt>.\n+ */\n+ public HighlightBuilder highlighterType(String highlighterType) {\n+ this.highlighterType = highlighterType;\n+ return this;\n+ }\n+\n @Override\n public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {\n builder.startObject(\"highlight\");\n@@ -197,6 +209,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws\n if (requireFieldMatch != null) {\n builder.field(\"require_field_match\", requireFieldMatch);\n }\n+ if (highlighterType != null) {\n+ builder.field(\"type\", highlighterType);\n+ }\n if (fields != null) {\n builder.startObject(\"fields\");\n for (Field field : fields) {\n@@ -213,6 +228,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws\n if (field.requireFieldMatch != null) {\n builder.field(\"require_field_match\", field.requireFieldMatch);\n }\n+ if (field.highlighterType != null) {\n+ builder.field(\"type\", field.highlighterType);\n+ }\n \n builder.endObject();\n }\n@@ -229,6 +247,7 @@ public static class Field {\n int fragmentOffset = -1;\n int numOfFragments = -1;\n Boolean requireFieldMatch;\n+ String highlighterType;\n \n public Field(String name) {\n this.name = name;\n@@ -257,5 +276,10 @@ public Field requireFieldMatch(boolean requireFieldMatch) {\n this.requireFieldMatch = requireFieldMatch;\n return this;\n }\n+\n+ public Field highlighterType(String highlighterType) {\n+ this.highlighterType = highlighterType;\n+ return this;\n+ }\n }\n }", "filename": "src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java", "status": "modified" }, { "diff": "@@ -129,10 +129,22 @@ public void hitExecute(SearchContext context, HitContext hitContext) throws Elas\n continue;\n }\n }\n-\n- // if we can do highlighting using Term Vectors, use FastVectorHighlighter, otherwise, use the\n- // slower plain highlighter\n- if (mapper.termVector() != Field.TermVector.WITH_POSITIONS_OFFSETS) {\n+ boolean useFastVectorHighlighter;\n+ if (field.highlighterType() == null) {\n+ // if we can do highlighting using Term Vectors, use FastVectorHighlighter, otherwise, use the\n+ // slower plain highlighter\n+ useFastVectorHighlighter = mapper.termVector() == Field.TermVector.WITH_POSITIONS_OFFSETS;\n+ } else if 
(field.highlighterType().equals(\"fast-vector-highlighter\")) {\n+ if (mapper.termVector() != Field.TermVector.WITH_POSITIONS_OFFSETS) {\n+ throw new FetchPhaseExecutionException(context, \"The field [\" + field.field() + \"] should be indexed with term vector with position offsets to be used with fast vector highlighter\");\n+ }\n+ useFastVectorHighlighter = true;\n+ } else if (field.highlighterType().equals(\"highlighter\")) {\n+ useFastVectorHighlighter = false;\n+ } else {\n+ throw new FetchPhaseExecutionException(context, \"Unknown highlighter type [\" + field.highlighterType() + \"] for the field [\" + field.field() + \"]\");\n+ }\n+ if (!useFastVectorHighlighter) {\n MapperHighlightEntry entry = cache.mappers.get(mapper);\n if (entry == null) {\n // Don't use the context.query() since it might be rewritten, and we need to pass the non rewritten queries to", "filename": "src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java", "status": "modified" }, { "diff": "@@ -76,6 +76,7 @@ public void parse(XContentParser parser, SearchContext context) throws Exception\n String globalEncoder = \"default\";\n int globalBoundaryMaxScan = SimpleBoundaryScanner2.DEFAULT_MAX_SCAN;\n char[] globalBoundaryChars = SimpleBoundaryScanner2.DEFAULT_BOUNDARY_CHARS;\n+ String globalHighlighterType = null;\n \n while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {\n if (token == XContentParser.Token.FIELD_NAME) {\n@@ -117,6 +118,8 @@ public void parse(XContentParser parser, SearchContext context) throws Exception\n globalBoundaryMaxScan = parser.intValue();\n } else if (\"boundary_chars\".equals(topLevelFieldName) || \"boundaryChars\".equals(topLevelFieldName)) {\n globalBoundaryChars = parser.text().toCharArray();\n+ } else if (\"type\".equals(topLevelFieldName)) {\n+ globalHighlighterType = parser.text();\n }\n } else if (token == XContentParser.Token.START_OBJECT) {\n if (\"fields\".equals(topLevelFieldName)) {\n@@ -161,6 +164,8 @@ public void parse(XContentParser parser, SearchContext context) throws Exception\n field.boundaryMaxScan(parser.intValue());\n } else if (\"boundary_chars\".equals(topLevelFieldName) || \"boundaryChars\".equals(topLevelFieldName)) {\n field.boundaryChars(parser.text().toCharArray());\n+ } else if (\"type\".equals(fieldName)) {\n+ field.highlighterType(parser.text());\n }\n }\n }\n@@ -206,6 +211,9 @@ public void parse(XContentParser parser, SearchContext context) throws Exception\n if (field.boundaryChars() == null) {\n field.boundaryChars(globalBoundaryChars);\n }\n+ if (field.highlighterType() == null) {\n+ field.highlighterType(globalHighlighterType);\n+ }\n }\n \n context.highlight(new SearchContextHighlight(fields));", "filename": "src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java", "status": "modified" }, { "diff": "@@ -58,6 +58,8 @@ public static class Field {\n \n private Boolean requireFieldMatch;\n \n+ private String highlighterType;\n+\n private int boundaryMaxScan = -1;\n private char[] boundaryChars = null;\n \n@@ -141,6 +143,14 @@ public void requireFieldMatch(boolean requireFieldMatch) {\n this.requireFieldMatch = requireFieldMatch;\n }\n \n+ public String highlighterType() {\n+ return highlighterType;\n+ }\n+\n+ public void highlighterType(String type) {\n+ this.highlighterType = type;\n+ }\n+\n public int boundaryMaxScan() {\n return boundaryMaxScan;\n }", "filename": "src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java", "status": "modified" }, { "diff": "@@ -29,6 +29,7 @@\n 
import org.elasticsearch.indices.IndexMissingException;\n import org.elasticsearch.search.SearchHit;\n import org.elasticsearch.search.builder.SearchSourceBuilder;\n+import org.elasticsearch.search.highlight.HighlightBuilder;\n import org.elasticsearch.test.integration.AbstractNodesTests;\n import org.testng.annotations.AfterClass;\n import org.testng.annotations.BeforeClass;\n@@ -781,4 +782,109 @@ public void testMultiMapperNoVectorFromSource() throws Exception {\n hit = search.hits().getAt(0);\n assertThat(hit.highlightFields().get(\"title.key\").fragments()[0].string(), equalTo(\"<em>this</em> <em>is</em> <em>a</em> <em>test</em>\"));\n }\n+\n+ @Test\n+ public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exception {\n+ try {\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+\n+ client.admin().indices().prepareCreate(\"test\").setSettings(ImmutableSettings.settingsBuilder().put(\"number_of_shards\", 2))\n+ .addMapping(\"type1\", jsonBuilder().startObject().startObject(\"type1\").startObject(\"properties\")\n+ .startObject(\"title\").field(\"type\", \"string\").field(\"store\", \"yes\").field(\"term_vector\", \"no\").endObject()\n+ .endObject().endObject().endObject())\n+ .execute().actionGet();\n+\n+ for (int i = 0; i < 5; i++) {\n+ client.prepareIndex(\"test\", \"type1\", Integer.toString(i))\n+ .setSource(\"title\", \"This is a test for the enabling fast vector highlighter\").setRefresh(true).execute().actionGet();\n+ }\n+\n+ SearchResponse search = client.prepareSearch()\n+ .setQuery(matchPhraseQuery(\"title\", \"this is a test\"))\n+ .addHighlightedField(\"title\", 50, 1, 10)\n+ .execute().actionGet();\n+\n+ assertThat(Arrays.toString(search.shardFailures()), search.failedShards(), equalTo(0));\n+\n+ search = client.prepareSearch()\n+ .setQuery(matchPhraseQuery(\"title\", \"this is a test\"))\n+ .addHighlightedField(\"title\", 50, 1, 10)\n+ .setHighlighterType(\"fast-vector-highlighter\")\n+ .execute().actionGet();\n+\n+ assertThat(search.failedShards(), equalTo(2));\n+\n+ }\n+\n+ @Test\n+ public void testDisableFastVectorHighlighter() throws Exception {\n+ try {\n+ client.admin().indices().prepareDelete(\"test\").execute().actionGet();\n+ } catch (Exception e) {\n+ // ignore\n+ }\n+\n+ client.admin().indices().prepareCreate(\"test\").setSettings(ImmutableSettings.settingsBuilder().put(\"number_of_shards\", 2))\n+ .addMapping(\"type1\", jsonBuilder().startObject().startObject(\"type1\").startObject(\"properties\")\n+ .startObject(\"title\").field(\"type\", \"string\").field(\"store\", \"yes\").field(\"term_vector\", \"with_positions_offsets\").endObject()\n+ .endObject().endObject().endObject())\n+ .execute().actionGet();\n+\n+ for (int i = 0; i < 5; i++) {\n+ client.prepareIndex(\"test\", \"type1\", Integer.toString(i))\n+ .setSource(\"title\", \"This is a test for the workaround for the fast vector highlighting SOLR-3724\").setRefresh(true).execute().actionGet();\n+ }\n+\n+ SearchResponse search = client.prepareSearch()\n+ .setQuery(matchPhraseQuery(\"title\", \"test for the workaround\"))\n+ .addHighlightedField(\"title\", 50, 1, 10)\n+ .execute().actionGet();\n+\n+ assertThat(Arrays.toString(search.shardFailures()), search.failedShards(), equalTo(0));\n+\n+ assertThat(search.hits().totalHits(), equalTo(5l));\n+ assertThat(search.hits().hits().length, equalTo(5));\n+\n+ for (SearchHit hit : search.hits()) {\n+ // Because of SOLR-3724 nothing is highlighted when FVH is used\n+ 
assertThat(hit.highlightFields().isEmpty(), equalTo(true));\n+ }\n+\n+ // Using plain highlighter instead of FVH\n+ search = client.prepareSearch()\n+ .setQuery(matchPhraseQuery(\"title\", \"test for the workaround\"))\n+ .addHighlightedField(\"title\", 50, 1, 10)\n+ .setHighlighterType(\"highlighter\")\n+ .execute().actionGet();\n+\n+ assertThat(Arrays.toString(search.shardFailures()), search.failedShards(), equalTo(0));\n+\n+ assertThat(search.hits().totalHits(), equalTo(5l));\n+ assertThat(search.hits().hits().length, equalTo(5));\n+\n+ for (SearchHit hit : search.hits()) {\n+ // With plain highlighter terms are highlighted correctly\n+ assertThat(hit.highlightFields().get(\"title\").fragments()[0].string(), equalTo(\"This is a <em>test</em> for the <em>workaround</em> for the fast vector highlighting SOLR-3724\"));\n+ }\n+\n+ // Using plain highlighter instead of FVH on the field level\n+ search = client.prepareSearch()\n+ .setQuery(matchPhraseQuery(\"title\", \"test for the workaround\"))\n+ .addHighlightedField(new HighlightBuilder.Field(\"title\").highlighterType(\"highlighter\"))\n+ .setHighlighterType(\"highlighter\")\n+ .execute().actionGet();\n+\n+ assertThat(Arrays.toString(search.shardFailures()), search.failedShards(), equalTo(0));\n+\n+ assertThat(search.hits().totalHits(), equalTo(5l));\n+ assertThat(search.hits().hits().length, equalTo(5));\n+\n+ for (SearchHit hit : search.hits()) {\n+ // With plain highlighter terms are highlighted correctly\n+ assertThat(hit.highlightFields().get(\"title\").fragments()[0].string(), equalTo(\"This is a <em>test</em> for the <em>workaround</em> for the fast vector highlighting SOLR-3724\"));\n+ }\n+ }\n }", "filename": "src/test/java/org/elasticsearch/test/integration/search/highlight/HighlighterSearchTests.java", "status": "modified" } ] }
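Taken together, this change lets a request opt out of the fast vector highlighter either globally or per field through a new `type` option (`highlighter` for the plain highlighter, `fast-vector-highlighter` for FVH), and fails with a clear error when FVH is requested on a field indexed without `term_vector: with_positions_offsets`. A sketch of the REST form, assuming the same `test` index and `title` field used in the tests above (the Java API equivalent added here is `setHighlighterType("highlighter")` or `HighlightBuilder.Field#highlighterType`):

```
curl -XGET 'localhost:9200/test/_search' -d '{
  "query" : { "match_phrase" : { "title" : "test for the workaround" } },
  "highlight" : {
    "type" : "highlighter",
    "fields" : {
      "title" : { "fragment_size" : 50, "number_of_fragments" : 1 }
    }
  }
}'
```

The same `type` key can also be set inside an individual field's options to override the global choice for that field only.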
{ "body": "a user ran into this issue on the mailinglist: https://groups.google.com/d/topic/elasticsearch/ozfn5xhpH-g/discussion\n\nWe currently can't highlight constant score query if it is nested in another query. The Lucene 3.6 release opened up the WeightedSpanTermExtractor to extract from \"unknown\" queries, we can possibly move our logic in HighlightPhase in there now.\n", "comments": [ { "body": "pushed\n", "created_at": "2012-10-19T19:34:49Z" } ], "number": 2332, "title": "Highlighting fails for ConstantScoreQuery" }
{ "body": "This PR moves some of the \"hacked\" query extraction into a custom SpanTermExtractor that can extract those queries even if they are nested in a BQ or similar composite queries. This also exposes queryBuilder support on CustomScoreQueryBuilder via the Java API - this fixes #2332\n", "number": 2333, "review_comments": [], "title": "#2332 support CustomScoreQuery highlighting and expose QueryBuilder on CustomScoreQuery via Java API" }
{ "commits": [ { "message": "#2332 support CustomScoreQuery highlighting and expose QueryBuilder on CustomScoreQuery via Java API" } ], "files": [ { "diff": "@@ -32,8 +32,10 @@\n public class ConstantScoreQueryBuilder extends BaseQueryBuilder implements BoostableQueryBuilder<ConstantScoreQueryBuilder> {\n \n private final FilterBuilder filterBuilder;\n+ private final QueryBuilder queryBuilder;\n \n private float boost = -1;\n+ \n \n /**\n * A query that wraps a filter and simply returns a constant score equal to the\n@@ -43,7 +45,18 @@ public class ConstantScoreQueryBuilder extends BaseQueryBuilder implements Boost\n */\n public ConstantScoreQueryBuilder(FilterBuilder filterBuilder) {\n this.filterBuilder = filterBuilder;\n+ this.queryBuilder = null;\n }\n+ /**\n+ * A query that wraps a query and simply returns a constant score equal to the\n+ * query boost for every document in the query.\n+ *\n+ * @param queryBuilder The query to wrap in a constant score query\n+ */\n+ public ConstantScoreQueryBuilder(QueryBuilder queryBuilder) {\n+ this.filterBuilder = null;\n+ this.queryBuilder = queryBuilder;\n+ } \n \n /**\n * Sets the boost for this query. Documents matching this query will (in addition to the normal\n@@ -57,8 +70,15 @@ public ConstantScoreQueryBuilder boost(float boost) {\n @Override\n protected void doXContent(XContentBuilder builder, Params params) throws IOException {\n builder.startObject(ConstantScoreQueryParser.NAME);\n- builder.field(\"filter\");\n- filterBuilder.toXContent(builder, params);\n+ if (queryBuilder != null) {\n+ assert filterBuilder == null;\n+ builder.field(\"query\");\n+ queryBuilder.toXContent(builder, params);\n+ } else {\n+ builder.field(\"filter\");\n+ filterBuilder.toXContent(builder, params); \n+ }\n+ \n if (boost != -1) {\n builder.field(\"boost\", boost);\n }", "filename": "src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java", "status": "modified" }, { "diff": "@@ -454,6 +454,16 @@ public static FilteredQueryBuilder filteredQuery(QueryBuilder queryBuilder, @Nul\n public static ConstantScoreQueryBuilder constantScoreQuery(FilterBuilder filterBuilder) {\n return new ConstantScoreQueryBuilder(filterBuilder);\n }\n+ \n+ /**\n+ * A query that wraps another query and simply returns a constant score equal to the\n+ * query boost for every document in the query.\n+ *\n+ * @param queryBuilder The query to wrap in a constant score query\n+ */\n+ public static ConstantScoreQueryBuilder constantScoreQuery(QueryBuilder queryBuilder) {\n+ return new ConstantScoreQueryBuilder(queryBuilder);\n+ }\n \n /**\n * A query that simply applies the boost fact to the wrapped query (multiplies it).", "filename": "src/main/java/org/elasticsearch/index/query/QueryBuilders.java", "status": "modified" }, { "diff": "@@ -0,0 +1,101 @@\n+/*\n+ * Licensed to ElasticSearch and Shay Banon under one\n+ * or more contributor license agreements. See the NOTICE file\n+ * distributed with this work for additional information\n+ * regarding copyright ownership. ElasticSearch licenses this\n+ * file to you under the Apache License, Version 2.0 (the\n+ * \"License\"); you may not use this file except in compliance\n+ * with the License. You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing,\n+ * software distributed under the License is distributed on an\n+ * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n+ * KIND, either express or implied. 
See the License for the\n+ * specific language governing permissions and limitations\n+ * under the License.\n+ */\n+\n+package org.elasticsearch.search.highlight;\n+\n+import java.io.IOException;\n+import java.util.Map;\n+\n+import org.apache.lucene.index.IndexReader;\n+import org.apache.lucene.search.ConstantScoreQuery;\n+import org.apache.lucene.search.FilteredQuery;\n+import org.apache.lucene.search.Query;\n+import org.apache.lucene.search.highlight.QueryScorer;\n+import org.apache.lucene.search.highlight.WeightedSpanTerm;\n+import org.apache.lucene.search.highlight.WeightedSpanTermExtractor;\n+import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;\n+import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;\n+\n+public final class CustomQueryScorer extends QueryScorer {\n+\n+ public CustomQueryScorer(Query query, IndexReader reader, String field,\n+ String defaultField) {\n+ super(query, reader, field, defaultField);\n+ }\n+\n+ public CustomQueryScorer(Query query, IndexReader reader, String field) {\n+ super(query, reader, field);\n+ }\n+\n+ public CustomQueryScorer(Query query, String field, String defaultField) {\n+ super(query, field, defaultField);\n+ }\n+\n+ public CustomQueryScorer(Query query, String field) {\n+ super(query, field);\n+ }\n+\n+ public CustomQueryScorer(Query query) {\n+ super(query);\n+ }\n+\n+ public CustomQueryScorer(WeightedSpanTerm[] weightedTerms) {\n+ super(weightedTerms);\n+ }\n+\n+ @Override\n+ protected WeightedSpanTermExtractor newTermExtractor(String defaultField) {\n+ return defaultField == null ? new CustomWeightedSpanTermExtractor()\n+ : new CustomWeightedSpanTermExtractor(defaultField);\n+ }\n+ \n+ private static class CustomWeightedSpanTermExtractor extends WeightedSpanTermExtractor {\n+\n+ public CustomWeightedSpanTermExtractor() {\n+ super();\n+ }\n+\n+ public CustomWeightedSpanTermExtractor(String defaultField) {\n+ super(defaultField);\n+ }\n+\n+ @Override\n+ protected void extractUnknownQuery(Query query,\n+ Map<String, WeightedSpanTerm> terms) throws IOException {\n+ if (query instanceof FunctionScoreQuery) {\n+ query = ((FunctionScoreQuery) query).getSubQuery();\n+ extract(query, terms);\n+ } else if (query instanceof FiltersFunctionScoreQuery) {\n+ query = ((FiltersFunctionScoreQuery) query).getSubQuery();\n+ extract(query, terms);\n+ } else if (query instanceof ConstantScoreQuery) {\n+ ConstantScoreQuery q = (ConstantScoreQuery) query;\n+ if (q.getQuery() != null) {\n+ query = q.getQuery();\n+ extract(query, terms);\n+ }\n+ } else if (query instanceof FilteredQuery) {\n+ query = ((FilteredQuery) query).getQuery();\n+ extract(query, terms);\n+ }\n+ }\n+ \n+ }\n+\n+}", "filename": "src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java", "status": "added" }, { "diff": "@@ -138,33 +138,8 @@ public void hitExecute(SearchContext context, HitContext hitContext) throws Elas\n // Don't use the context.query() since it might be rewritten, and we need to pass the non rewritten queries to\n // let the highlighter handle MultiTerm ones\n \n- // QueryScorer uses WeightedSpanTermExtractor to extract terms, but we can't really plug into\n- // it, so, we hack here (and really only support top level queries)\n Query query = context.parsedQuery().query();\n- while (true) {\n- boolean extracted = false;\n- if (query instanceof FunctionScoreQuery) {\n- query = ((FunctionScoreQuery) query).getSubQuery();\n- extracted = true;\n- } else if (query instanceof FiltersFunctionScoreQuery) {\n- query = 
((FiltersFunctionScoreQuery) query).getSubQuery();\n- extracted = true;\n- } else if (query instanceof ConstantScoreQuery) {\n- ConstantScoreQuery q = (ConstantScoreQuery) query;\n- if (q.getQuery() != null) {\n- query = q.getQuery();\n- extracted = true;\n- }\n- } else if (query instanceof FilteredQuery) {\n- query = ((FilteredQuery) query).getQuery();\n- extracted = true;\n- }\n- if (!extracted) {\n- break;\n- }\n- }\n-\n- QueryScorer queryScorer = new QueryScorer(query, field.requireFieldMatch() ? mapper.names().indexName() : null);\n+ QueryScorer queryScorer = new CustomQueryScorer(query, field.requireFieldMatch() ? mapper.names().indexName() : null);\n queryScorer.setExpandMultiTermQuery(true);\n Fragmenter fragmenter;\n if (field.numberOfFragments() == 0) {", "filename": "src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java", "status": "modified" }, { "diff": "@@ -312,8 +312,31 @@ public void testPlainHighlighter() throws Exception {\n assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n \n assertThat(searchResponse.hits().getAt(0).highlightFields().get(\"field2\").fragments()[0].string(), equalTo(\"The <xxx>quick</xxx> brown fox jumps over the lazy dog\"));\n- }\n+ \n+ logger.info(\"--> searching on _all with constant score, highlighting on field2\");\n+ source = searchSource()\n+ .query(constantScoreQuery(prefixQuery(\"_all\", \"qui\")))\n+ .from(0).size(60).explain(true)\n+ .highlight(highlight().field(\"field2\").order(\"score\").preTags(\"<xxx>\").postTags(\"</xxx>\"));\n+\n+ searchResponse = client.search(searchRequest(\"test\").source(source).searchType(QUERY_THEN_FETCH).scroll(timeValueMinutes(10))).actionGet();\n+ assertThat(\"Failures \" + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));\n+ assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n+\n+ assertThat(searchResponse.hits().getAt(0).highlightFields().get(\"field2\").fragments()[0].string(), equalTo(\"The <xxx>quick</xxx> brown fox jumps over the lazy dog\"));\n \n+ logger.info(\"--> searching on _all with constant score, highlighting on field2\");\n+ source = searchSource()\n+ .query(boolQuery().should(constantScoreQuery(prefixQuery(\"_all\", \"qui\"))))\n+ .from(0).size(60).explain(true)\n+ .highlight(highlight().field(\"field2\").order(\"score\").preTags(\"<xxx>\").postTags(\"</xxx>\"));\n+\n+ searchResponse = client.search(searchRequest(\"test\").source(source).searchType(QUERY_THEN_FETCH).scroll(timeValueMinutes(10))).actionGet();\n+ assertThat(\"Failures \" + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));\n+ assertThat(searchResponse.hits().totalHits(), equalTo(1l));\n+ assertThat(searchResponse.hits().getAt(0).highlightFields().get(\"field2\").fragments()[0].string(), equalTo(\"The <xxx>quick</xxx> brown fox jumps over the lazy dog\"));\n+ }\n+ \n @Test\n public void testFastVectorHighlighter() throws Exception {\n try {", "filename": "src/test/java/org/elasticsearch/test/integration/search/highlight/HighlighterSearchTests.java", "status": "modified" } ] }
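By moving the unwrapping of `FunctionScoreQuery`, `FiltersFunctionScoreQuery`, `ConstantScoreQuery` and `FilteredQuery` into `extractUnknownQuery`, the extractor now sees those queries even when they are nested inside a bool query, which is exactly the case from the mailing-list report. A REST sketch of the scenario exercised by the new test (the `test` index and the `field2` document "The quick brown fox jumps over the lazy dog" are assumed from the surrounding tests):

```
curl -XGET 'localhost:9200/test/_search' -d '{
  "query" : {
    "bool" : {
      "should" : [
        { "constant_score" : { "query" : { "prefix" : { "_all" : "qui" } } } }
      ]
    }
  },
  "highlight" : {
    "pre_tags" : ["<xxx>"], "post_tags" : ["</xxx>"],
    "fields" : { "field2" : {} }
  }
}'
# expected fragment, per the added test:
# "The <xxx>quick</xxx> brown fox jumps over the lazy dog"
```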
{ "body": "To reproduce, start two ES nodes and run the following script:\n\n```\ncurl -XDELETE http://localhost:9200/testidx\ncurl -XPUT http://localhost:9200/testidx -d '{\n \"settings\" : {\n \"index\" : {\n \"number_of_shards\" : 1,\n \"number_of_replicas\" : 0\n }\n },\n \"mappings\" : {\n \"rec\" : {\n \"_source\" : { \"enabled\" : false },\n \"properties\" : {\n \"from\" : { \"type\": \"string\", \"store\": \"yes\" }\n }\n }\n }\n}'\ncurl -XPUT http://localhost:9200/testidx/rec/1 -d '{\n \"from\" : [\"user3@test.com\",\"user2@test.com\",\"user5@test.com\"]\n}'\necho\ncurl -XPOST http://localhost:9200/testidx/_refresh\necho\ncurl localhost:9200/testidx/_search -d '{\n \"highlight\": {\n \"fields\":{\"from\":{\"number_of_fragments\":0}}\n },\n \"fields\":[\"*\"],\n \"size\":10,\n \"sort\":[\"_score\"],\n \"query\":{\n \"query_string\":{\n \"default_field\":\"from\",\n \"query\":\"*:*\"\n }\n }\n}' && echo\ncurl localhost:9201/testidx/_search -d '{\n \"highlight\": {\n \"fields\":{\"from\":{\"number_of_fragments\":0}}\n },\n \"fields\":[\"*\"],\n \"size\":10,\n \"sort\":[\"_score\"],\n \"query\":{\n \"query_string\":{\n \"default_field\":\"from\",\n \"query\":\"*:*\"\n }\n }\n}' && echo\n```\n\nOne of the search requests fails and the node where shard is allocated throws the following exception:\n\n```\n[2011-10-07 11:16:57,872][DEBUG][action.search.type ] [Maha Yogi] [testidx][0], node[czKz17uBQiaTczE_OzQ0GA], [P], s[STARTED]: Failed to execute [org.elasticsearch.action.search.SearchRequest@43233ac]\norg.elasticsearch.transport.RemoteTransportException: [Mikhail Rasputin][inet[/10.0.1.8:9300]][search/phase/query+fetch]\nCaused by: java.lang.NullPointerException\n at org.elasticsearch.common.io.stream.HandlesStreamOutput.writeUTF(HandlesStreamOutput.java:54)\n at org.elasticsearch.search.highlight.HighlightField.writeTo(HighlightField.java:110)\n at org.elasticsearch.search.internal.InternalSearchHit.writeTo(InternalSearchHit.java:574)\n at org.elasticsearch.search.internal.InternalSearchHits.writeTo(InternalSearchHits.java:246)\n at org.elasticsearch.search.fetch.FetchSearchResult.writeTo(FetchSearchResult.java:101)\n at org.elasticsearch.search.fetch.QueryFetchSearchResult.writeTo(QueryFetchSearchResult.java:90)\n at org.elasticsearch.transport.support.TransportStreams.buildResponse(TransportStreams.java:136)\n at org.elasticsearch.transport.netty.NettyTransportChannel.sendResponse(NettyTransportChannel.java:74)\n at org.elasticsearch.transport.netty.NettyTransportChannel.sendResponse(NettyTransportChannel.java:66)\n at org.elasticsearch.search.action.SearchServiceTransportAction$SearchQueryFetchTransportHandler.messageReceived(SearchServiceTransportAction.java:502)\n at org.elasticsearch.search.action.SearchServiceTransportAction$SearchQueryFetchTransportHandler.messageReceived(SearchServiceTransportAction.java:492)\n at org.elasticsearch.transport.netty.MessageChannelHandler$RequestHandler.run(MessageChannelHandler.java:238)\n at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)\n at java.lang.Thread.run(Thread.java:680)\n```\n", "comments": [], "number": 1380, "title": "Highlighting fails with NPE for multifield and number_of_fragments:0" }
{ "body": "See #1380 for more information\n", "number": 1381, "review_comments": [], "title": "Fix NPE in HighlightField serialization. Fixes #1380" }
{ "commits": [ { "message": "Fix NPE in HighlightField serialization. Fixes #1380" } ], "files": [ { "diff": "@@ -169,9 +169,9 @@ public int compare(TextFragment o1, TextFragment o2) {\n }\n });\n }\n- String[] fragments;\n+ String[] fragments = null;\n // number_of_fragments is set to 0 but we have a multivalued field\n- if (field.numberOfFragments() == 0 && textsToHighlight.size() > 1) {\n+ if (field.numberOfFragments() == 0 && textsToHighlight.size() > 1 && fragsList.size() > 0) {\n fragments = new String[1];\n for (int i = 0; i < fragsList.size(); i++) {\n fragments[0] = (fragments[0] != null ? (fragments[0] + \" \") : \"\") + fragsList.get(i).toString();\n@@ -185,7 +185,7 @@ public int compare(TextFragment o1, TextFragment o2) {\n }\n }\n \n- if (fragments.length > 0) {\n+ if (fragments != null && fragments.length > 0) {\n HighlightField highlightField = new HighlightField(field.field(), fragments);\n highlightFields.put(highlightField.name(), highlightField);\n }", "filename": "modules/elasticsearch/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java", "status": "modified" } ] }
{ "body": "Delete API: Using refresh parameter might not refresh delete operation on replica\n", "comments": [], "number": 1047, "title": "Delete API: Using refresh parameter might not refresh delete operation on replica" }
{ "body": "These refresh calls are no longer needed since #1047 was fixed.\n", "number": 1050, "review_comments": [], "title": "Remove unnecessary refresh after delete" }
{ "commits": [ { "message": "Remove unnecessary refresh after delete" } ], "files": [ { "diff": "@@ -86,7 +86,6 @@ protected Client getClient() {\n \n logger.info(\"--> deleting with no routing, should not delete anything\");\n client.prepareDelete(\"test\", \"type1\", \"1\").setRefresh(true).execute().actionGet();\n- client.admin().indices().prepareRefresh().execute().actionGet();\n for (int i = 0; i < 5; i++) {\n assertThat(client.prepareGet(\"test\", \"type1\", \"1\").execute().actionGet().exists(), equalTo(false));\n assertThat(client.prepareGet(\"test\", \"type1\", \"1\").setRouting(\"0\").execute().actionGet().exists(), equalTo(true));\n@@ -95,7 +94,6 @@ protected Client getClient() {\n \n logger.info(\"--> deleting with routing alias, should delete\");\n client.prepareDelete(\"alias0\", \"type1\", \"1\").setRefresh(true).execute().actionGet();\n- client.admin().indices().prepareRefresh().execute().actionGet();\n for (int i = 0; i < 5; i++) {\n assertThat(client.prepareGet(\"test\", \"type1\", \"1\").execute().actionGet().exists(), equalTo(false));\n assertThat(client.prepareGet(\"test\", \"type1\", \"1\").setRouting(\"0\").execute().actionGet().exists(), equalTo(false));", "filename": "modules/test/integration/src/test/java/org/elasticsearch/test/integration/routing/AliasRoutingTests.java", "status": "modified" } ] }
{ "body": "https://github.com/xiaoxiangmoe/issue-vue-2.7-on-click-type-error.git\r\n\r\n```\r\npnpm run type-check\r\n```\r\n\r\n```\r\nsrc/App.vue:12:6 - error TS2559: Type '{ onClick: any; }' has no properties in common with type 'Readonly<Partial<{}> & Omit<Readonly<ExtractPropTypes<{ border: { type: PropType<boolean>; }; }>>, never>>'.\r\n\r\n12 <HelloWorld @click=\"handleClick\" />\r\n ~~~~~~~~~~\r\n\r\n\r\nFound 1 error in src/App.vue:12\r\n```\r\n\r\n\r\n---\r\nHelloWorld.vue\r\n```vue\r\n<script setup lang=\"ts\">\r\n\r\ninterface ButtonProps {\r\n border?: boolean; \r\n}\r\n\r\nconst props = defineProps<ButtonProps>() \r\ndefineEmits<{\r\n (event: 'click',payload: MouseEvent): void\r\n}>()\r\n</script>\r\n<template>\r\n <div id=\"app\">\r\n <button @click=\"$emit('click', $event)\">Click me</button>\r\n </div>\r\n</template>\r\n```\r\n\r\nThis error come since vue-tsc 1.7.12", "comments": [ { "body": "@xiaoxiangmoe Seemed to be a vue2 type issue. It doesn't convert emits to props", "created_at": "2023-10-17T08:47:39Z" }, { "body": "@LinusBorg could you help transfer this to vuejs/vue? thanks.", "created_at": "2023-10-23T07:19:40Z" }, { "body": "? Are you GPT?", "created_at": "2023-11-15T12:05:04Z" }, { "body": "Also seeing something similar.\r\n\r\n```shell\r\nmodules/src/components/CombinedModal.vue:94:14 - error TS2345: Argument of type '{ props: any; onClose: any; }' is not assignable to parameter of type 'Readonly<Partial<{ [x: number]: string; }> & Omit<Readonly<ExtractPropTypes<string[]>>, DefaultKeys<string[]>>> & Record<...>'.\r\n Type '{ props: any; onClose: any; }' is not assignable to type 'Readonly<Partial<{ [x: number]: string; }> & Omit<Readonly<ExtractPropTypes<string[]>>, DefaultKeys<string[]>>>'.\r\n Types of property 'toString' are incompatible.\r\n Type '() => string' is not assignable to type '(() => string) & string'.\r\n\r\n94 <MergeModal @close=\"onClose\" v-bind:props=\"props\"></MergeModal>\r\n```\r\n\r\nDon't think I'm really doing anything special.\r\n\r\nShim file (removing doesn't seem to change anything): \r\n\r\n```ts\r\ndeclare module \"*.vue\" {\r\n import type { DefineComponent } from \"vue\";\r\n const component: DefineComponent;\r\n export default component;\r\n}\r\n```\r\n\r\nRunning type checking with `vue-tsc --noEmit` with this config file:\r\n\r\n```json\r\n{\r\n \"extends\": \"../tsconfig.json\",\r\n \"include\": [\r\n \"./types/shims-vue.d.ts\",\r\n \"./types/vuejs-datepicker.d.ts\",\r\n \"**/*.js\", // required even if we're not directly type checking (see `allowJs` below)\r\n \"**/*.ts\",\r\n \"**/*.vue\"\r\n ],\r\n \"compilerOptions\": {\r\n \"composite\": true,\r\n \"allowJs\": true, // permits .ts/.vue files we're type checking to import .js files, enabling us to gradually add type checking w/o requiring an all-at-once migration. 
Worth noting that this is completely distinct from `checkJs`, which tells typescript to actually type check them.\r\n \"types\": [\r\n \"vite/client\" // Supports Vite's `import.meta.env`\r\n ],\r\n \"baseUrl\": \".\",\r\n \"paths\": {\r\n \"@/*\": [\"./*\"]\r\n }\r\n }\r\n}\r\n```\r\n\r\nInheriting from this config file:\r\n\r\n```json\r\n{\r\n \"compilerOptions\": {\r\n // Base options / sensible defaults that we want to pretty universally apply across all of our services\r\n \"target\": \"esnext\",\r\n \"module\": \"esnext\",\r\n \"moduleResolution\": \"node\",\r\n \"resolveJsonModule\": true,\r\n \"esModuleInterop\": true,\r\n \"forceConsistentCasingInFileNames\": true,\r\n \"strict\": true,\r\n \"skipLibCheck\": true\r\n }\r\n}\r\n```", "created_at": "2023-12-01T20:10:15Z" }, { "body": "It seems like there is a type mismatch between the props defined in HelloWorld.vue and how it is being used in App.vue. You need to make sure that the props passed to the HelloWorld component match the expected props.", "created_at": "2024-01-20T20:55:47Z" }, { "body": "In volar we convert `v-on:xxx` to `onXxx` and pass them as props to type-check required emits. Vue3 automatically converts events to props but vue2 doesn't. Just FYI @ZAID-BAARAB ", "created_at": "2024-01-21T07:47:02Z" } ], "number": 13104, "title": "Vue 2.7 strictTemplates error" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix: fix #13104\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `main` branch for v2.x (or to a previous version branch)\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 13133, "review_comments": [], "title": "fix(type): convert emits to props" }
{ "commits": [ { "message": "fix(type): convert emits to props" }, { "message": "feat: support extends and mixins" } ], "files": [ { "diff": "@@ -966,6 +966,47 @@ describe('emits', () => {\n }\n }\n })\n+\n+ // should have `onXXX` props for emits\n+ const a = defineComponent({\n+ props: {\n+ bar: String\n+ },\n+ emits: {\n+ foo: (n: number) => n > 0\n+ },\n+ setup(props) {\n+ expectType<((n: number) => boolean) | undefined>(props.onFoo)\n+ }\n+ })\n+\n+ const b = defineComponent({\n+ extends: a,\n+ props: {\n+ bar2: String\n+ },\n+ emits: {\n+ foo2: (n: number) => n > 0\n+ },\n+ setup(props) {\n+ expectType<((n: number) => boolean) | undefined>(props.onFoo)\n+ }\n+ })\n+\n+ defineComponent({\n+ mixins: [a, b],\n+ props: {\n+ bar3: String\n+ },\n+ emits: {\n+ foo3: (n: number) => n > 0\n+ },\n+ setup(props) {\n+ expectType<((n: number) => boolean) | undefined>(props.onFoo)\n+ expectType<((n: number) => boolean) | undefined>(props.onFoo2)\n+ expectType<((n: number) => boolean) | undefined>(props.onFoo3)\n+ }\n+ })\n })\n \n // describe('componentOptions setup should be `SetupContext`', () => {", "filename": "types/test/v3/define-component-test.tsx", "status": "modified" }, { "diff": "@@ -1,7 +1,7 @@\n import { Vue } from './vue'\n import { VNode } from './vnode'\n import { ComponentOptions as Vue2ComponentOptions } from './options'\n-import { EmitsOptions, SetupContext } from './v3-setup-context'\n+import { EmitsOptions, EmitsToProps, SetupContext } from './v3-setup-context'\n import { Data, LooseRequired, UnionToIntersection } from './common'\n import {\n ComponentPropsOptions,\n@@ -52,7 +52,7 @@ export type SetupFunction<\n Emits extends EmitsOptions = {}\n > = (\n this: void,\n- props: Readonly<Props>,\n+ props: Readonly<Props & EmitsToProps<Emits>>,\n ctx: SetupContext<Emits>\n ) => RawBindings | (() => VNode | null) | void\n ", "filename": "types/v3-component-options.d.ts", "status": "modified" }, { "diff": "@@ -17,7 +17,7 @@ import {\n CreateComponentPublicInstance\n } from './v3-component-public-instance'\n import { Data, HasDefined } from './common'\n-import { EmitsOptions } from './v3-setup-context'\n+import { EmitsOptions, EmitsToProps } from './v3-setup-context'\n import { CreateElement, RenderContext } from './umd'\n \n export type DefineComponent<\n@@ -31,9 +31,9 @@ export type DefineComponent<\n E extends EmitsOptions = {},\n EE extends string = string,\n Props = Readonly<\n- PropsOrPropOptions extends ComponentPropsOptions\n+ (PropsOrPropOptions extends ComponentPropsOptions\n ? ExtractPropTypes<PropsOrPropOptions>\n- : PropsOrPropOptions\n+ : PropsOrPropOptions) & EmitsToProps<E>\n >,\n Defaults = ExtractDefaultPropTypes<PropsOrPropOptions>\n > = ComponentPublicInstanceConstructor<", "filename": "types/v3-define-component.d.ts", "status": "modified" }, { "diff": "@@ -13,6 +13,25 @@ export type ObjectEmitsOptions = Record<\n \n export type EmitsOptions = ObjectEmitsOptions | string[]\n \n+export type EmitsToProps<T extends EmitsOptions> = T extends string[]\n+ ? {\n+ [K in string & `on${Capitalize<T[number]>}`]?: (...args: any[]) => any\n+ }\n+ : T extends ObjectEmitsOptions\n+ ? {\n+ [K in string &\n+ `on${Capitalize<string & keyof T>}`]?: K extends `on${infer C}`\n+ ? T[Uncapitalize<C>] extends null\n+ ? (...args: any[]) => any\n+ : (\n+ ...args: T[Uncapitalize<C>] extends (...args: infer P) => any\n+ ? 
P\n+ : never\n+ ) => any\n+ : never\n+ }\n+ : {}\n+\n export type EmitFn<\n Options = ObjectEmitsOptions,\n Event extends keyof Options = keyof Options,", "filename": "types/v3-setup-context.d.ts", "status": "modified" } ] }
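The `EmitsToProps` helper above is what makes `v-on` bindings type-check against `defineComponent`/`defineEmits` declarations in 2.7: every declared event contributes an optional `on`-prefixed callback prop. A minimal TypeScript sketch mirroring the test added in this PR (the `HelloWorld` name is borrowed from the issue, `bar`/`foo` from the test):

```ts
import { defineComponent } from 'vue'

const HelloWorld = defineComponent({
  props: { bar: String },
  emits: { foo: (n: number) => n > 0 },
  setup(props) {
    // with EmitsToProps the declared emit surfaces as an optional callback prop:
    // ((n: number) => boolean) | undefined
    props.onFoo
  }
})

// so a parent passing the handler (which is what @foo is converted to during
// template type-checking) now type-checks:
// <HelloWorld :bar="'x'" @foo="n => n > 0" />
```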
{ "body": "https://github.com/xiaoxiangmoe/issue-vue-2.7-on-click-type-error.git\r\n\r\n```\r\npnpm run type-check\r\n```\r\n\r\n```\r\nsrc/App.vue:12:6 - error TS2559: Type '{ onClick: any; }' has no properties in common with type 'Readonly<Partial<{}> & Omit<Readonly<ExtractPropTypes<{ border: { type: PropType<boolean>; }; }>>, never>>'.\r\n\r\n12 <HelloWorld @click=\"handleClick\" />\r\n ~~~~~~~~~~\r\n\r\n\r\nFound 1 error in src/App.vue:12\r\n```\r\n\r\n\r\n---\r\nHelloWorld.vue\r\n```vue\r\n<script setup lang=\"ts\">\r\n\r\ninterface ButtonProps {\r\n border?: boolean; \r\n}\r\n\r\nconst props = defineProps<ButtonProps>() \r\ndefineEmits<{\r\n (event: 'click',payload: MouseEvent): void\r\n}>()\r\n</script>\r\n<template>\r\n <div id=\"app\">\r\n <button @click=\"$emit('click', $event)\">Click me</button>\r\n </div>\r\n</template>\r\n```\r\n\r\nThis error come since vue-tsc 1.7.12", "comments": [ { "body": "@xiaoxiangmoe Seemed to be a vue2 type issue. It doesn't convert emits to props", "created_at": "2023-10-17T08:47:39Z" }, { "body": "@LinusBorg could you help transfer this to vuejs/vue? thanks.", "created_at": "2023-10-23T07:19:40Z" }, { "body": "? Are you GPT?", "created_at": "2023-11-15T12:05:04Z" }, { "body": "Also seeing something similar.\r\n\r\n```shell\r\nmodules/src/components/CombinedModal.vue:94:14 - error TS2345: Argument of type '{ props: any; onClose: any; }' is not assignable to parameter of type 'Readonly<Partial<{ [x: number]: string; }> & Omit<Readonly<ExtractPropTypes<string[]>>, DefaultKeys<string[]>>> & Record<...>'.\r\n Type '{ props: any; onClose: any; }' is not assignable to type 'Readonly<Partial<{ [x: number]: string; }> & Omit<Readonly<ExtractPropTypes<string[]>>, DefaultKeys<string[]>>>'.\r\n Types of property 'toString' are incompatible.\r\n Type '() => string' is not assignable to type '(() => string) & string'.\r\n\r\n94 <MergeModal @close=\"onClose\" v-bind:props=\"props\"></MergeModal>\r\n```\r\n\r\nDon't think I'm really doing anything special.\r\n\r\nShim file (removing doesn't seem to change anything): \r\n\r\n```ts\r\ndeclare module \"*.vue\" {\r\n import type { DefineComponent } from \"vue\";\r\n const component: DefineComponent;\r\n export default component;\r\n}\r\n```\r\n\r\nRunning type checking with `vue-tsc --noEmit` with this config file:\r\n\r\n```json\r\n{\r\n \"extends\": \"../tsconfig.json\",\r\n \"include\": [\r\n \"./types/shims-vue.d.ts\",\r\n \"./types/vuejs-datepicker.d.ts\",\r\n \"**/*.js\", // required even if we're not directly type checking (see `allowJs` below)\r\n \"**/*.ts\",\r\n \"**/*.vue\"\r\n ],\r\n \"compilerOptions\": {\r\n \"composite\": true,\r\n \"allowJs\": true, // permits .ts/.vue files we're type checking to import .js files, enabling us to gradually add type checking w/o requiring an all-at-once migration. 
Worth noting that this is completely distinct from `checkJs`, which tells typescript to actually type check them.\r\n \"types\": [\r\n \"vite/client\" // Supports Vite's `import.meta.env`\r\n ],\r\n \"baseUrl\": \".\",\r\n \"paths\": {\r\n \"@/*\": [\"./*\"]\r\n }\r\n }\r\n}\r\n```\r\n\r\nInheriting from this config file:\r\n\r\n```json\r\n{\r\n \"compilerOptions\": {\r\n // Base options / sensible defaults that we want to pretty universally apply across all of our services\r\n \"target\": \"esnext\",\r\n \"module\": \"esnext\",\r\n \"moduleResolution\": \"node\",\r\n \"resolveJsonModule\": true,\r\n \"esModuleInterop\": true,\r\n \"forceConsistentCasingInFileNames\": true,\r\n \"strict\": true,\r\n \"skipLibCheck\": true\r\n }\r\n}\r\n```", "created_at": "2023-12-01T20:10:15Z" }, { "body": "It seems like there is a type mismatch between the props defined in HelloWorld.vue and how it is being used in App.vue. You need to make sure that the props passed to the HelloWorld component match the expected props.", "created_at": "2024-01-20T20:55:47Z" }, { "body": "In volar we convert `v-on:xxx` to `onXxx` and pass them as props to type-check required emits. Vue3 automatically converts events to props but vue2 doesn't. Just FYI @ZAID-BAARAB ", "created_at": "2024-01-21T07:47:02Z" } ], "number": 13104, "title": "Vue 2.7 strictTemplates error" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix: fixes #13104\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `main` branch for v2.x (or to a previous version branch)\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [x] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\nConverts emits to props.\r\n\r\n**Other information:**\r\n", "number": 13105, "review_comments": [], "title": "fix: convert emits to props, fix #13104" }
{ "commits": [ { "message": "fix: convert emits to props" }, { "message": "test: add tests" } ], "files": [ { "diff": "@@ -966,6 +966,16 @@ describe('emits', () => {\n }\n }\n })\n+\n+ // should have `onXXX` props for emits\n+ defineComponent({\n+ emits: {\n+ foo: (n: number) => n > 0\n+ },\n+ setup(props) {\n+ expectType<((n: number) => boolean) | undefined>(props.onFoo)\n+ }\n+ })\n })\n \n // describe('componentOptions setup should be `SetupContext`', () => {", "filename": "types/test/v3/define-component-test.tsx", "status": "modified" }, { "diff": "@@ -1,7 +1,7 @@\n import { Vue } from './vue'\n import { VNode } from './vnode'\n import { ComponentOptions as Vue2ComponentOptions } from './options'\n-import { EmitsOptions, SetupContext } from './v3-setup-context'\n+import { EmitsOptions, EmitsToProps, SetupContext } from './v3-setup-context'\n import { Data, LooseRequired, UnionToIntersection } from './common'\n import {\n ComponentPropsOptions,\n@@ -137,6 +137,13 @@ export type ExtractComputedReturns<T extends any> = {\n : never\n }\n \n+export type ResolveProps<PropsOrPropOptions, E extends EmitsOptions> = Readonly<\n+ PropsOrPropOptions extends ComponentPropsOptions\n+ ? ExtractPropTypes<PropsOrPropOptions>\n+ : PropsOrPropOptions\n+> &\n+ ({} extends E ? {} : EmitsToProps<E>)\n+\n export type ComponentOptionsWithProps<\n PropsOptions = ComponentPropsOptions,\n RawBindings = Data,\n@@ -147,7 +154,7 @@ export type ComponentOptionsWithProps<\n Extends extends ComponentOptionsMixin = ComponentOptionsMixin,\n Emits extends EmitsOptions = {},\n EmitsNames extends string = string,\n- Props = ExtractPropTypes<PropsOptions>,\n+ Props = ResolveProps<PropsOptions, Emits>,\n Defaults = ExtractDefaultPropTypes<PropsOptions>\n > = ComponentOptionsBase<\n Props,\n@@ -185,7 +192,7 @@ export type ComponentOptionsWithArrayProps<\n Extends extends ComponentOptionsMixin = ComponentOptionsMixin,\n Emits extends EmitsOptions = {},\n EmitsNames extends string = string,\n- Props = Readonly<{ [key in PropNames]?: any }>\n+ Props = Readonly<{ [key in PropNames]?: any }> & EmitsToProps<Emits>\n > = ComponentOptionsBase<\n Props,\n RawBindings,\n@@ -221,9 +228,10 @@ export type ComponentOptionsWithoutProps<\n Mixin extends ComponentOptionsMixin = ComponentOptionsMixin,\n Extends extends ComponentOptionsMixin = ComponentOptionsMixin,\n Emits extends EmitsOptions = {},\n- EmitsNames extends string = string\n+ EmitsNames extends string = string,\n+ PropsWithEmits = Props & EmitsToProps<Emits>\n > = ComponentOptionsBase<\n- Props,\n+ PropsWithEmits,\n RawBindings,\n D,\n C,", "filename": "types/v3-component-options.d.ts", "status": "modified" }, { "diff": "@@ -10,7 +10,8 @@ import {\n ComponentOptionsWithArrayProps,\n ComponentOptionsWithProps,\n ComponentOptionsMixin,\n- ComponentOptionsBase\n+ ComponentOptionsBase,\n+ ResolveProps\n } from './v3-component-options'\n import {\n ComponentPublicInstanceConstructor,\n@@ -161,7 +162,7 @@ export function defineComponent<\n Extends,\n Emits,\n EmitsNames,\n- Props\n+ ResolveProps<PropsOptions, Emits>\n >\n : { functional?: never } & ComponentOptionsWithProps<\n PropsOptions,", "filename": "types/v3-define-component.d.ts", "status": "modified" }, { "diff": "@@ -13,6 +13,25 @@ export type ObjectEmitsOptions = Record<\n \n export type EmitsOptions = ObjectEmitsOptions | string[]\n \n+export type EmitsToProps<T extends EmitsOptions> = T extends string[]\n+ ? 
{\n+ [K in string & `on${Capitalize<T[number]>}`]?: (...args: any[]) => any\n+ }\n+ : T extends ObjectEmitsOptions\n+ ? {\n+ [K in string &\n+ `on${Capitalize<string & keyof T>}`]?: K extends `on${infer C}`\n+ ? T[Uncapitalize<C>] extends null\n+ ? (...args: any[]) => any\n+ : (\n+ ...args: T[Uncapitalize<C>] extends (...args: infer P) => any\n+ ? P\n+ : never\n+ ) => any\n+ : never\n+ }\n+ : {}\n+\n export type EmitFn<\n Options = ObjectEmitsOptions,\n Event extends keyof Options = keyof Options,", "filename": "types/v3-setup-context.d.ts", "status": "modified" } ] }
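For completeness, the array form of `emits` is covered by the first branch of `EmitsToProps`: each event name becomes a loosely typed optional `onXxx` prop. A small sketch of that branch (the `change` event name is just an illustrative placeholder):

```ts
import { defineComponent } from 'vue'

const Emitter = defineComponent({
  emits: ['change'],
  setup(props) {
    // 'change' maps to an optional onChange prop typed (...args: any[]) => any
    props.onChange?.('new value')
  }
})
```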
{ "body": "### Version\r\n2.7.14\r\n\r\n### Reproduction link\r\nI think it's not necessary as it's obvious in source code\r\n\r\n\r\n\r\n### Steps to reproduce\r\nInstall prettier v3 in a project depending on vue/compiler-sfc v2\r\n\r\n### What is expected?\r\nNo error\r\n\r\n### What is actually happening?\r\nFail in compiling template.\r\n\r\nI found compiler-sfc tried to format code with API of prettier but `format` of Prettier becomes async in v3 and it returns a Promise instead of string.\r\n\r\nhttps://github.com/vuejs/vue/blob/49b6bd4264c25ea41408f066a1835f38bf6fe9f1/packages/compiler-sfc/src/compileTemplate.ts#L179\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Added a workaround in vue-loader v15.10.2. https://github.com/vuejs/vue-loader/releases/tag/v15.10.2\r\n\r\nBut let's keep this issue open so that we can fix it properly in vue core later.", "created_at": "2023-08-23T09:19:34Z" }, { "body": "> But let's keep this issue open so that we can fix it properly in vue core later.\r\n\r\nHi @sodatea!\r\n\r\nThe problem is caused by not specifying the required prettier version in the `compiler/sfc` package. \r\n\r\nWhile Prettier 3 is not supported, a simple solution could be to specify the Prettier version in optional dependencies. It would keep prettifying working even if Prettier 3 is installed. \r\n \r\nThere is a PR that does this: https://github.com/vuejs/vue/pull/13053\r\n\r\nThe same solution was applied by you before, but it was lost during migration to Vue 2.7: https://github.com/vuejs/component-compiler-utils/commit/aea1b79765f0f6d688e64b7698d5ff62862002d1", "created_at": "2023-10-31T22:50:31Z" }, { "body": "I faced this issue via vue-jest.\r\n\r\nIt throws errors like this.\r\n\r\n```\r\n FAIL components/atoms/MButton/index.spec.ts\r\n ● Test suite failed to run\r\n\r\n TypeError: Expected a SourceNode, string, or an array of SourceNodes and strings. Got [object Promise]\r\n\r\n 2 | import type { Wrapper } from '@vue/test-utils'\r\n 3 | import { RouterLinkStub, mount } from '@vue/test-utils'\r\n > 4 | import MButton from '~/components/atoms/MButton/index.vue'\r\n | ^\r\n 5 |\r\n 6 | const factory = (\r\n 7 | propsData?: object,\r\n\r\n at SourceNode_add [as add] (../../node_modules/@vue/vue2-jest/node_modules/source-map/lib/source-node.js:178:11)\r\n at addToSourceMap (../../node_modules/@vue/vue2-jest/lib/generate-code.js:14:12)\r\n at generateCode (../../node_modules/@vue/vue2-jest/lib/generate-code.js:49:5)\r\n at Object.module.exports [as process] (../../node_modules/@vue/vue2-jest/lib/process.js:169:18)\r\n at Object.<anonymous> (components/atoms/MButton/index.spec.ts:4:1)\r\n```\r\n\r\nCurrent workaround is to set the `prettify` option to false via `jest.config.ts`\r\n\r\n\r\n```ts\r\nmodule.exports = {\r\n globals: {\r\n 'vue-jest': {\r\n templateCompiler: {\r\n prettify: false,\r\n },\r\n },\r\n },\r\n // ...\r\n}\r\n```", "created_at": "2023-11-29T05:24:24Z" }, { "body": "> I faced this issue via vue-jest.\r\n> \r\n> It throws errors like this.\r\n> \r\n> ```\r\n> FAIL components/atoms/MButton/index.spec.ts\r\n> ● Test suite failed to run\r\n> \r\n> TypeError: Expected a SourceNode, string, or an array of SourceNodes and strings. 
Got [object Promise]\r\n> \r\n> 2 | import type { Wrapper } from '@vue/test-utils'\r\n> 3 | import { RouterLinkStub, mount } from '@vue/test-utils'\r\n> > 4 | import MButton from '~/components/atoms/MButton/index.vue'\r\n> | ^\r\n> 5 |\r\n> 6 | const factory = (\r\n> 7 | propsData?: object,\r\n> \r\n> at SourceNode_add [as add] (../../node_modules/@vue/vue2-jest/node_modules/source-map/lib/source-node.js:178:11)\r\n> at addToSourceMap (../../node_modules/@vue/vue2-jest/lib/generate-code.js:14:12)\r\n> at generateCode (../../node_modules/@vue/vue2-jest/lib/generate-code.js:49:5)\r\n> at Object.module.exports [as process] (../../node_modules/@vue/vue2-jest/lib/process.js:169:18)\r\n> at Object.<anonymous> (components/atoms/MButton/index.spec.ts:4:1)\r\n> ```\r\n> \r\n> Current workaround is to set the `prettify` option to false via `jest.config.ts`\r\n> \r\n> ```ts\r\n> module.exports = {\r\n> globals: {\r\n> 'vue-jest': {\r\n> templateCompiler: {\r\n> prettify: false,\r\n> },\r\n> },\r\n> },\r\n> // ...\r\n> }\r\n> ```\r\n\r\nTysm for this fix @yshrsmz", "created_at": "2024-01-04T23:42:22Z" } ], "number": 13052, "title": "compiler-sfc not compatible with prettier v3" }
{ "body": "close: #13052\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [ ] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [x] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `main` branch for v2.x (or to a previous version branch)\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n\r\n`@vue/compiler-sfc` optionally uses Prettier and it is not compatible with Prettier v3.\r\n\r\nBut `prettier` is not listed in the `package.json`.\r\n\r\nIt wasn't a problem with Prettier v2 because prettier is [optional dependency of `component-compiler-utils`](https://github.com/vuejs/component-compiler-utils/blob/82a37174990e31eaec609887a0ec262f06b454dd/package.json#L69). So Prettier is installed anyway and everything works.\r\n\r\nBut if some project has `Prettier@3`, then `@vue/compiler-sfc` uses project's prettier and fails.\r\n\r\nThere was the same issue early in [component-compiler-utils](https://github.com/vuejs/component-compiler-utils). See:\r\n- https://github.com/vuejs/component-compiler-utils/commit/aea1b79765f0f6d688e64b7698d5ff62862002d1\r\n- https://github.com/vuejs/component-compiler-utils/pull/89\r\n\r\nThis PR specifies Prettier as an `optionalDependency` of `@vue/compiler-sfc`. With specified dependency, it will use `prettier@2` even if a project has `prettier@3`.\r\n\r\nP.S. I used the same versions as it was in the `component-compiler-utils`. It seems it has the same code.\r\n\r\n**Alternative solution:** make `@vue/compiler-sfc` compatible with both `Prettier@\"1 || 2\"` and `Prettier@3`. The problem is that it will make the compile function async.", "number": 13053, "review_comments": [], "title": "chore(compiler-sfc): specify prettier version (fix #13052)" }
{ "commits": [ { "message": "chore(compiler-sfc): specify prettier version\n\nclose: #13052" } ], "files": [ { "diff": "@@ -30,5 +30,8 @@\n \"pug\": \"^3.0.2\",\n \"sass\": \"^1.52.3\",\n \"stylus\": \"^0.58.1\"\n+ },\n+ \"optionalDependencies\": {\n+ \"prettier\": \"^1.18.2 || ^2.0.0\"\n }\n }", "filename": "packages/compiler-sfc/package.json", "status": "modified" }, { "diff": "@@ -150,6 +150,10 @@ importers:\n source-map:\n specifier: ^0.6.1\n version: 0.6.1\n+ optionalDependencies:\n+ prettier:\n+ specifier: ^1.18.2 || ^2.0.0\n+ version: 2.7.1\n devDependencies:\n '@babel/types':\n specifier: ^7.19.4\n@@ -5098,7 +5102,6 @@ packages:\n resolution: {integrity: sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==}\n engines: {node: '>=10.13.0'}\n hasBin: true\n- dev: true\n \n /pretty-format@29.7.0:\n resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==}", "filename": "pnpm-lock.yaml", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/AleksandrasNovikovas/w042x1c8/](https://jsfiddle.net/AleksandrasNovikovas/w042x1c8/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nRun provided fiddle. There are three svg boxes with foreignObject:\r\n1. contains simple html tags\r\n2. contains simple vue component\r\n3. contains complex (with slot) vue component\r\n\r\n\r\n### What is expected?\r\nAll three boxes should show link and input elements\r\n\r\n### What is actually happening?\r\nThird box does not show link and input elements.\r\n\r\n---\r\nWhile inspecting DOM (in chrome or in firefox) you will find that elements of second box and third box are identical.\r\nProblem is their types: (in chome dev console select element and tab properties)\r\nselect input element from second box and you will find following list: Object->EventTarget->Node->Element->HTMLElement->HTMLInputElement->input;\r\nselect input element from third box and you will find following list: Object->EventTarget->Node->Element->SVGElement->input;\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "#11349 This is not a problem anymore because I have fixed it and submitted PR😁.", "created_at": "2020-04-26T14:15:38Z" }, { "body": "Can we get this reviewed and fixed? I don't get why this obvious issue is not treated.", "created_at": "2021-02-19T07:16:50Z" } ], "number": 11315, "title": "Components slots are not rendered inside svg foreignObject" }
{ "body": "Fix #11315\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [ ] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 13008, "review_comments": [], "title": "fix(svg): apply every namespace in slot when call applyNS" }
{ "commits": [ { "message": "fix(namespace): apply every namespace in slot when call applyNS (#11315)" }, { "message": "refactor: extract variable" } ], "files": [ { "diff": "@@ -146,9 +146,14 @@ function applyNS(vnode, ns, force?: boolean) {\n ns = undefined\n force = true\n }\n- if (isDef(vnode.children)) {\n- for (let i = 0, l = vnode.children.length; i < l; i++) {\n- const child = vnode.children[i]\n+ const children =\n+ vnode.children ||\n+ // #11315\n+ (vnode.componentOptions && vnode.componentOptions.children)\n+\n+ if (isDef(children)) {\n+ for (let i = 0, l = children.length; i < l; i++) {\n+ const child = children[i]\n if (\n isDef(child.tag) &&\n (isUndef(child.ns) || (isTrue(force) && child.tag !== 'svg'))", "filename": "src/core/vdom/create-element.ts", "status": "modified" }, { "diff": "@@ -147,6 +147,49 @@ describe('create-element', () => {\n expect(vnode.children[0].children[1].ns).toBe('svg')\n })\n \n+ // #11315\n+ it('render svg foreignObject nested component slot with correct namespace', () => {\n+ const vm = new Vue({\n+ template: `\n+ <svg>\n+ <box></box>\n+ </svg>\n+ `,\n+ components: {\n+ 'box': {\n+ template: `\n+ <foreignObject>\n+ <comp-with-slot>\n+ <p></p><svg></svg>\n+ </comp-with-slot>\n+ </foreignObject>\n+ `,\n+ components: {\n+ 'comp-with-slot': {\n+ template: `\n+ <div>\n+ <slot />\n+ </div>\n+ `\n+ }\n+ }\n+ }\n+ }\n+ }).$mount()\n+ const box = vm.$children[0]\n+ const compWithSlot = box.$children[0]\n+ expect(box.$vnode.ns).toBe('svg')\n+ expect(box._vnode.tag).toBe('foreignObject')\n+ expect(box._vnode.ns).toBe('svg')\n+ expect(compWithSlot.$vnode.ns).toBeUndefined()\n+ expect(compWithSlot._vnode.tag).toBe('div')\n+ expect(compWithSlot._vnode.ns).toBeUndefined()\n+ expect(compWithSlot._vnode.children[0].tag).toBe('p')\n+ expect(compWithSlot._vnode.children[0].ns).toBeUndefined()\n+ expect(compWithSlot._vnode.children[1].tag).toBe('svg')\n+ expect(compWithSlot._vnode.children[1].ns).toBe('svg')\n+ })\n+\n // #6642\n it('render svg foreignObject component with correct namespace', () => {\n const vm = new Vue({", "filename": "test/unit/modules/vdom/create-element.spec.ts", "status": "modified" } ] }
{ "body": "### Version\r\n2.7.14\r\n\r\n### Reproduction link\r\n[codesandbox.io](https://codesandbox.io/s/style-merge-9hltuv)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n\r\nclick mStyle button twice\r\n\r\n### What is expected?\r\nshow red border-right and blue border-bottom\r\n\r\n### What is actually happening?\r\nonly show red border-right. is not similar as setAttribute but vue3 is similar like setAttribute\r\n\r\n---\r\nis this a bug?\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "@posva \r\nWhat he means is that the results of the two operations are inconsistent, but works fine in `vue-core`.\r\nThis might be a bug.", "created_at": "2022-12-15T08:59:22Z" }, { "body": "Right, I misread", "created_at": "2022-12-15T10:23:34Z" }, { "body": "can I take this bug and try fixing it?", "created_at": "2022-12-16T10:15:03Z" }, { "body": "After the border style is set for the element, the browser will automatically format the style when the border-* style is set. The code is optimized for style reuse, resulting in bugs\r\n![image](https://user-images.githubusercontent.com/49704630/208117297-f3ac4e88-671a-44e6-8712-60da9a4eeff2.png)\r\nAfter adding border-bottom\r\n![image](https://user-images.githubusercontent.com/49704630/208117863-49658a9b-0174-4997-a075-59a564725ade.png)\r\n", "created_at": "2022-12-16T14:20:28Z" } ], "number": 12901, "title": "bind style merge not as expected" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\nfix #12901 \r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [ ] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [ ] It's submitted to the `main` branch for v2.x (or to a previous version branch)\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 12946, "review_comments": [], "title": "fix: correctly update styles" }
{ "commits": [ { "message": "fix: correctly update styles" }, { "message": "chore: update test" } ], "files": [ { "diff": "@@ -91,7 +91,7 @@ function updateStyle(oldVnode: VNodeWithData, vnode: VNodeWithData) {\n }\n for (name in newStyle) {\n cur = newStyle[name]\n- if (cur !== oldStyle[name]) {\n+ if (cur !== oldStyle[name] || !el.style[name]) {\n // ie9 setting to null has no effect, must use empty string\n setProp(el, name, cur == null ? '' : cur)\n }", "filename": "src/platforms/web/runtime/modules/style.ts", "status": "modified" }, { "diff": "@@ -28,6 +28,14 @@ describe('vdom style module', () => {\n expect(elm.style.display).toBe('block')\n })\n \n+ it('border related style should update correctly', () => {\n+ const vnode1 = new VNode('p', { style: { border: '10px solid red', 'border-bottom': '10px solid blue' } })\n+ const vnode2 = new VNode('p', { style: { 'border-right': '10px solid red', 'border-bottom': '10px solid blue' } })\n+ patch(null, vnode1)\n+ const elm = patch(vnode1, vnode2)\n+ expect(elm.style.borderBottom).toBe('10px solid blue')\n+ })\n+\n it('should remove elements attrs', () => {\n const vnode1 = new VNode('p', { style: { fontSize: '12px' } })\n const vnode2 = new VNode('p', { style: { display: 'block' } })", "filename": "test/unit/modules/vdom/modules/style.spec.ts", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n\r\n- Functional components: [https://github.com/sin1ght/test](https://github.com/sin1ght/test)\r\n- Regular components and slots: https://codesandbox.io/embed/vue-template-3pnsx\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nAfter npm run serve, click the toggle button and find that child has no style.\r\n\r\nChild and child2 components are reused, child's data-v-* disappears, causing the style to disappear\r\n\r\n### What is expected?\r\n\r\nChild should have a black background scope style\r\n\r\n### What is actually happening?\r\n\r\nChild without style\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "not only functional components,use functional components just to make sameVnode true,a functional component and a normal dom is ok as well", "created_at": "2019-08-20T21:53:27Z" }, { "body": "i add two lines code in patchVnode,and it work will at my demo,but i'm not sure\r\n\r\n`oldVnode.fnScopeId = vnode.fnScopeId;\r\n setScope(oldVnode);`\r\n\r\n![mt44X9.png](https://s2.ax1x.com/2019/08/21/mt44X9.png)", "created_at": "2019-08-21T03:10:42Z" }, { "body": "I tested with the 4 possible combinations among regular and functional components and the problem only appears when both are functional\r\n\r\nIf you are up to give the fix a try, you can. If other things break, you should see tests failing", "created_at": "2019-08-21T10:13:05Z" }, { "body": "if samaVnode true then patchVnode\r\n\r\n![mUrTBT.png](https://s2.ax1x.com/2019/08/21/mUrTBT.png)\r\n\r\nregular component's tag will changed,but functional component not\r\n\r\n![mUrxjx.png](https://s2.ax1x.com/2019/08/21/mUrxjx.png)\r\n\r\nso,a normal dom is ok as well\r\n\r\n```javascript\r\n//parent.vue\r\n\r\n<template>\r\n <div class=\"parent\">\r\n <Child class=\"child\" v-if=\"!test\"/>\r\n <div class=\"child2\" v-if=\"test\">\r\n </div>\r\n</template>\r\n```\r\n\r\n", "created_at": "2019-08-21T11:00:50Z" }, { "body": "similar bug, affected pure normal components with slot, please see my simple demo.\r\n\r\nhttps://codesandbox.io/s/vue-template-jimot", "created_at": "2019-08-25T15:45:12Z" }, { "body": "@CzBiX \r\nI think your problem is same with me.\r\ntwo child reused but scopedId not be copied .\r\n![mRH28I.png](https://s2.ax1x.com/2019/08/26/mRH28I.png)", "created_at": "2019-08-26T02:45:12Z" }, { "body": "@sin1ght It's weird I couldn't see the problem in your repro using regular components but others' repro do show so I adapted the title. Sorry about that!", "created_at": "2019-08-29T11:51:57Z" }, { "body": "Hi, I'm interested interested in this particular task, how to I go ahead", "created_at": "2022-12-17T05:35:13Z" }, { "body": "@posva I submitted a pull request for this issue #12938 . But I noticed the requirements state that I should merge to main instead of dev, even though the Contributing Guide says I shouldn't.\r\n\r\nShould I make one for main instead?", "created_at": "2023-01-18T21:58:51Z" } ], "number": 10416, "title": "Scoped CSS attribute is reused or discarded when switching between components with scoped CSS" }
{ "body": "When patching a node the scope id might change. When it does the flow should be followed for updating non-same elements to make sure it is updated correctly in the DOM.\r\n\r\nfix #10416\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [ ] It's submitted to the `main` branch for v2.x (or to a previous version branch)\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nThere seems to be an issue with the tests that is not related to this change. From what I can see all tests are passing since there are some issues with the tests it might be worth double checking.\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [x] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 12938, "review_comments": [], "title": "fix(patch): Do not consider nodes with different scope ids the same (fix #10416)" }
{ "commits": [ { "message": "fix(patch): Do not consider nodes with different scope ids the same\n\nWhen patching a node the scope id might change. When it does the flow should be followed for\nupdating non-same elements to make sure it is updated correctly in the DOM.\n\nfix #10416" } ], "files": [ { "diff": "@@ -40,7 +40,8 @@ function sameVnode (a, b) {\n a.tag === b.tag &&\n a.isComment === b.isComment &&\n isDef(a.data) === isDef(b.data) &&\n- sameInputType(a, b)\n+ sameInputType(a, b) &&\n+ findScopeId(a) === findScopeId(b)\n ) || (\n isTrue(a.isAsyncPlaceholder) &&\n isUndef(b.asyncFactory.error)\n@@ -67,6 +68,20 @@ function createKeyToOldIdx (children, beginIdx, endIdx) {\n return map\n }\n \n+function findScopeId (node) {\n+ if (isDef(node.fnScopeId)) {\n+ return node.fnScopeId\n+ }\n+\n+ let ancestor = node\n+ while (ancestor) {\n+ if (isDef(ancestor.context) && isDef(ancestor.context.$options._scopeId)) {\n+ return ancestor.context.$options._scopeId\n+ }\n+ ancestor = ancestor.parent\n+ }\n+}\n+\n export function createPatchFunction (backend) {\n let i, j\n const cbs = {}", "filename": "src/core/vdom/patch.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.14\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/5w1ydeu8/3/](https://jsfiddle.net/5w1ydeu8/3/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n- Click between A and B => works\r\n- Click between A and C => works\r\n- Click between B and C => doesn't work\r\n\r\n### What is expected?\r\nThe scoped slot should update.\r\n\r\n### What is actually happening?\r\nThe scoped slot doesn't update.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Use `v-if` as a workaround:\r\n``` js\r\n <Foo v-if=\"frame === 'a'\">\r\n <template #default=\"x\">\r\n a\r\n </template>\r\n </Foo>\r\n <Foo v-if=\"frame === 'b'\">\r\n <template #default=\"x\">\r\n b\r\n </template>\r\n </Foo>\r\n <Foo v-if=\"frame === 'c'\">\r\n <template #default=\"x\">\r\n c\r\n </template>\r\n </Foo>\r\n```\r\n\r\nBut it does have something wrong with `v-else-if`, I'm looking into it.", "created_at": "2021-08-17T03:11:18Z" }, { "body": "@JuniorTour Thanks, I'm aware of various workarounds (like using `key`), but I thought this behavior was odd.", "created_at": "2021-08-17T03:23:22Z" } ], "number": 12223, "title": "v-if not patching component scoped slot correctly" }
{ "body": "fix #12223 \r\nfix #12922\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `main` branch for v2.x (or to a previous version branch)\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 12928, "review_comments": [], "title": "fix(compiler): scoped slot can't udate with with different slot content" }
{ "commits": [ { "message": "fix(compiler): scoped slot can't udate with with different slot content" } ], "files": [ { "diff": "@@ -432,7 +432,7 @@ function genScopedSlots(\n // it's possible for the same component to be reused but with different\n // compiled slot content. To avoid that, we generate a unique key based on\n // the generated code of all the slot contents.\n- let needsKey = !!el.if\n+ let needsKey = !!(el.if || el.elseif || el.else)\n \n // OR when it is inside another scoped slot or v-for (the reactivity may be\n // disconnected due to the intermediate scope variable)\n@@ -449,7 +449,7 @@ function genScopedSlots(\n needsForceUpdate = true\n break\n }\n- if (parent.if) {\n+ if (parent.if || parent.elseif) {\n needsKey = true\n }\n parent = parent.parent", "filename": "src/compiler/codegen/index.ts", "status": "modified" }, { "diff": "@@ -1267,7 +1267,7 @@ describe('Component scoped slot', () => {\n }).then(done)\n })\n \n- // #9534\n+ // #9534 #12922\n it('should detect conditional reuse with different slot content', done => {\n const Foo = {\n template: `<div><slot :n=\"1\" /></div>`\n@@ -1276,25 +1276,44 @@ describe('Component scoped slot', () => {\n const vm = new Vue({\n components: { Foo },\n data: {\n- ok: true\n+ value: 'a'\n },\n template: `\n <div>\n- <div v-if=\"ok\">\n+ <div v-if=\"value==='a'\">\n <foo v-slot=\"{ n }\">{{ n }}</foo>\n </div>\n- <div v-if=\"!ok\">\n+ <div v-else-if=\"value==='b'\">\n <foo v-slot=\"{ n }\">{{ n + 1 }}</foo>\n </div>\n+ <div v-else-if=\"value==='c'\">\n+ <foo v-slot=\"{ n }\">{{ n + 2 }}</foo>\n+ </div>\n+ <div v-else>\n+ <foo v-slot=\"{ n }\">{{ n + 3 }}</foo>\n+ </div>\n </div>\n `\n }).$mount()\n \n expect(vm.$el.textContent.trim()).toBe(`1`)\n- vm.ok = false\n+ vm.value = 'b'\n waitForUpdate(() => {\n expect(vm.$el.textContent.trim()).toBe(`2`)\n- }).then(done)\n+ })\n+ .then(() => {\n+ vm.value = 'c'\n+ })\n+ .then(() => {\n+ expect(vm.$el.textContent.trim()).toBe(`3`)\n+ })\n+ .then(() => {\n+ vm.value = 'd'\n+ })\n+ .then(() => {\n+ expect(vm.$el.textContent.trim()).toBe(`4`)\n+ })\n+ .then(done)\n })\n \n // #9644\n@@ -1403,4 +1422,42 @@ describe('Component scoped slot', () => {\n expect(parent.$el.textContent).toMatch(``)\n }).then(done)\n })\n+ // #12223\n+ it('should update when switching between components with slot', done => {\n+ const Foo = {\n+ template: `<div><slot :n=\"1\" /></div>`\n+ }\n+\n+ const vm = new Vue({\n+ template: `<div>\n+ <Foo v-if=\"value==='a'\"><template v-slot=\"{ n }\">{{ n }}</template></Foo>\n+ <Foo v-else-if=\"value==='b'\"><template v-slot=\"{ n }\">{{ n + 1 }}</template></Foo>\n+ <Foo v-else-if=\"value==='c'\"><template v-slot=\"{ n }\">{{ n + 2 }}</template></Foo>\n+ <Foo v-else><template v-slot=\"{ n}\">{{ n + 3 }}</template></Foo>\n+ </div>`,\n+ data: {\n+ value: 'a'\n+ },\n+ components: { Foo }\n+ }).$mount()\n+\n+ expect(vm.$el.textContent.trim()).toBe(`1`)\n+ vm.value = 'b'\n+ waitForUpdate(() => {\n+ expect(vm.$el.textContent.trim()).toBe(`2`)\n+ })\n+ .then(() => {\n+ vm.value = 'c'\n+ })\n+ .then(() => {\n+ expect(vm.$el.textContent.trim()).toBe(`3`)\n+ })\n+ .then(() => {\n+ vm.value = 'd'\n+ })\n+ .then(() => {\n+ expect(vm.$el.textContent.trim()).toBe(`4`)\n+ })\n+ .then(done)\n+ })\n })", "filename": "test/unit/features/component/component-scoped-slot.spec.ts", "status": "modified" } ] }
{ "body": "### Version\r\n2.7.7\r\n\r\n### Reproduction link\r\n[github.com](https://github.com/Shimada666/vue27_ref)\r\n\r\n<img width=\"769\" alt=\"image\" src=\"https://user-images.githubusercontent.com/33391300/180020544-4f31b277-4963-4c8f-bddc-3005cb5c0611.png\">\r\n\r\n<img width=\"912\" alt=\"image\" src=\"https://user-images.githubusercontent.com/33391300/180020673-6357ce68-3435-4580-ac27-4027f37bd088.png\">\r\n\r\n\r\n\r\n### Steps to reproduce\r\n1. Clone the repo\r\n2. Run `pnpm i`\r\n3. Run `pnpm dev` \r\n\r\n\r\n### What is expected?\r\nKebab-case component can be rendered.\r\n\r\n### What is actually happening?\r\nKebab-case component can't be rendered. But when I delete ref sentence, it works.\r\n\r\n---\r\nIs it a bug? \r\nIt can work in vue3.\r\nIt also can work in vue2.6 + vue-composition-api.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "```vue\r\n<template>\r\n <div>\r\n <p>I'm parent.</p>\r\n\r\n <p>This can work: {{right}} -> (<ChildPage/>)</p>\r\n <p>This can't work: {{wrong}} -> (<child-page/>)</p>\r\n\r\n <p>Because I coded 'const childPage = ref(null)'. If I delete it, they both can show.</p>\r\n </div>\r\n</template>\r\n<script setup>\r\nimport ChildPage from './ChildPage.vue'\r\nimport { ref } from 'vue'\r\n\r\nconst right = ref(`<ChildPage/>`)\r\nconst wrong = ref(`<child-page/>`)\r\n\r\nconst childPage = ref(null) // If I delete it, they both can show.\r\n</script>\r\n<style></style>\r\n\r\n```", "created_at": "2022-07-20T15:26:11Z" } ], "number": 12685, "title": "kabab-case component doesn't work when I typed same name but camelcase ref." }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [ ] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n\r\nclose #12685\r\n", "number": 12687, "review_comments": [], "title": "fix(codegen): improve resolve component name in <script setup>" }
{ "commits": [ { "message": "fix(codegen): improve resolve component name in <script setup>" }, { "message": "clean" }, { "message": "rebase" }, { "message": "rebase" }, { "message": "chore: improve code" } ], "files": [ { "diff": "@@ -13,7 +13,7 @@ import {\n ASTText,\n CompilerOptions\n } from 'types/compiler'\n-import { BindingMetadata } from 'sfc/types'\n+import { BindingMetadata, BindingTypes } from 'sfc/types'\n \n type TransformFunction = (el: ASTElement, code: string) => string\n type DataGenFunction = (el: ASTElement) => string\n@@ -104,10 +104,7 @@ export function genElement(el: ASTElement, state: CodegenState): string {\n // check if this is a component in <script setup>\n const bindings = state.options.bindings\n if (maybeComponent && bindings && bindings.__isScriptSetup !== false) {\n- tag =\n- checkBindingType(bindings, el.tag) ||\n- checkBindingType(bindings, camelize(el.tag)) ||\n- checkBindingType(bindings, capitalize(camelize(el.tag)))\n+ tag = checkBindingType(bindings, el.tag)\n }\n if (!tag) tag = `'${el.tag}'`\n \n@@ -127,9 +124,32 @@ export function genElement(el: ASTElement, state: CodegenState): string {\n }\n \n function checkBindingType(bindings: BindingMetadata, key: string) {\n- const type = bindings[key]\n- if (type && type.startsWith('setup')) {\n- return key\n+ const camelName = camelize(key)\n+ const PascalName = capitalize(camelName)\n+ const checkType = (type) => {\n+ if (bindings[key] === type) {\n+ return key\n+ }\n+ if (bindings[camelName] === type) {\n+ return camelName\n+ }\n+ if (bindings[PascalName] === type) {\n+ return PascalName\n+ }\n+ }\n+ const fromConst =\n+ checkType(BindingTypes.SETUP_CONST) ||\n+ checkType(BindingTypes.SETUP_REACTIVE_CONST)\n+ if (fromConst) {\n+ return fromConst\n+ }\n+\n+ const fromMaybeRef =\n+ checkType(BindingTypes.SETUP_LET) ||\n+ checkType(BindingTypes.SETUP_REF) ||\n+ checkType(BindingTypes.SETUP_MAYBE_REF)\n+ if (fromMaybeRef) {\n+ return fromMaybeRef\n }\n }\n ", "filename": "src/compiler/codegen/index.ts", "status": "modified" } ] }
{ "body": "### Version\r\n2.7.0-beta.2\r\n\r\n### Reproduction link\r\n[stackblitz.com](https://stackblitz.com/edit/vitejs-vite-ytrb9c?file=src%2FApp.vue&terminal=dev)\r\n\r\n\r\n\r\n### Steps to reproduce\r\n```ts\r\nimport { defineComponent, isReactive, toRefs } from 'vue';\r\n\r\nexport default defineComponent({\r\n props: {\r\n foo: String,\r\n },\r\n setup(props) {\r\n console.log(props, isReactive(props));\r\n\r\n // toRefs() expects a reactive object but received a plain one.\r\n console.log(toRefs(props, 'foo'));\r\n },\r\n});\r\n```\r\n\r\n### What is expected?\r\n`props` is reactive.\r\n\r\n### What is actually happening?\r\n`props` is not reactive.\r\n\r\n---\r\n\r\nP.S. `createApp` doesn't exist now. Will it be added in the final version of 2.7?\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [], "number": 12561, "title": "props is not reactive in 2.7.0-beta.2" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\n**Other information:**\r\n\r\nclose #12561\r\nfix(core): props is not reactive", "number": 12562, "review_comments": [], "title": "fix(core): props is not reactive" }
{ "commits": [ { "message": "fix(core): props is not reactive" } ], "files": [ { "diff": "@@ -116,6 +116,9 @@ function initProps(vm: Component, propsOptions: Object) {\n proxy(vm, `_props`, key)\n }\n }\n+ // observe prop\n+ const ob = observe(vm._props)\n+ ob && ob.vmCount++\n toggleObserving(true)\n }\n ", "filename": "src/core/instance/state.ts", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/CasperDai/mred18vo/7/](https://jsfiddle.net/CasperDai/mred18vo/7/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nclick 'change'\r\n\r\n### What is expected?\r\nel.style.display is 'flex'\r\n\r\n### What is actually happening?\r\nel.style.display is ''\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Use a `key` on the `<div>` as a workaround.", "created_at": "2021-03-29T11:39:58Z" }, { "body": "> Use a `key` on the `<div>` as a workaround.\r\nExcuse me, how should I operate?\r\n", "created_at": "2021-05-02T13:46:16Z" }, { "body": "@scientistzjf \r\n\r\nIt’s usually best to use `key` with `v-if` + `v-else`, if they are the same element type (e.g. both `<div>` elements).\r\n\r\nBy default, Vue updates the DOM as efficiently as possible. That means when switching between elements of the same type, it simply patches the existing element, rather than removing it and adding a new one in its place. This can have unintended consequences if these elements should not actually be considered the same.\r\n\r\n### Good\r\n```html\r\n<div\r\n v-if=\"error\"\r\n key=\"search-status\"\r\n>\r\n Error: {{ error }}\r\n</div>\r\n<div\r\n v-else\r\n key=\"search-results\"\r\n>\r\n {{ results }}\r\n</div>\r\n```\r\n<hr>\r\n\r\n### Bad\r\n```html\r\n<div v-if=\"error\">\r\n Error: {{ error }}\r\n</div>\r\n<div v-else>\r\n {{ results }}\r\n</div>\r\n```", "created_at": "2021-05-03T21:14:40Z" }, { "body": "@scientistzjf \r\n\r\nuse key like below\r\n\r\n```vue\r\n<div id=\"app\">\r\n <div>\r\n <div @click=\"click\">change</div>\r\n <div v-if=\"state === 0\">\r\n <div key='first-key' v-show=\"hasChildren\">some data</div>\r\n </div>\r\n <div v-else>\r\n <div ref=\"t\" key='second-key' style=\"display: flex;\">display: {{ val }}</div>\r\n </div>\r\n </div>\r\n</div>\r\n```\r\n", "created_at": "2021-07-05T12:41:33Z" }, { "body": "Is any working solution??", "created_at": "2021-09-10T13:59:03Z" }, { "body": "Use a key on the div", "created_at": "2023-01-22T15:36:02Z" } ], "number": 11984, "title": "<v-show> style.display is incorrect when reusing the elm" }
{ "body": "fix #11984\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 12344, "review_comments": [ { "body": "I think that if the style is set to anything manually via `style=\"display: none\"` or with `:style`, it should probably keep it as well", "created_at": "2021-11-03T18:45:01Z" } ], "title": "fix(v-show): display is not correct after v-show unbind (fix #11984)" }
{ "commits": [ { "message": "fix(v-show): display is not correct after v-show unbind\n\nfix #11984" } ], "files": [ { "diff": "@@ -53,7 +53,7 @@ export default {\n oldVnode: VNodeWithData,\n isDestroy: boolean\n ) {\n- if (!isDestroy) {\n+ if (!isDestroy && el.style.display === 'none') {\n el.style.display = el.__vOriginalDisplay\n }\n }", "filename": "src/platforms/web/runtime/directives/show.js", "status": "modified" }, { "diff": "@@ -81,4 +81,32 @@ describe('Directive v-show', () => {\n expect(vm.$el.firstChild.style.display).toBe('none')\n }).then(done)\n })\n+\n+ it('should set display correctly when unbind with falsy', done => {\n+ const vm = new Vue({\n+ template:\n+ '<div v-if=\"tester\" v-show=\"false\" style=\"display: block\"></div>' +\n+ '<div v-else style=\"display: flex\"></div>',\n+ data: { tester: true }\n+ }).$mount()\n+ expect(vm.$el.style.display).toBe('none')\n+ vm.tester = false\n+ waitForUpdate(() => {\n+ expect(vm.$el.style.display).toBe('flex')\n+ }).then(done)\n+ })\n+\n+ it('should set display correctly when unbind with truthy', done => {\n+ const vm = new Vue({\n+ template:\n+ '<div v-if=\"tester\" v-show=\"true\"></div>' +\n+ '<div v-else style=\"display: flex\"></div>',\n+ data: { tester: true }\n+ }).$mount()\n+ expect(vm.$el.style.display).toBe('')\n+ vm.tester = false\n+ waitForUpdate(() => {\n+ expect(vm.$el.style.display).toBe('flex')\n+ }).then(done)\n+ })\n })", "filename": "test/unit/features/directives/show.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.14\r\n\r\n### Reproduction link\r\n\r\nhttps://template-explorer.vuejs.org/#%3Cbutton%20%40keydown.ctrl.shift.alt.meta.exact%3D%22console.log(%24event)%22%3EClick%3C%2Fbutton%3E\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nuse `v-on` with `.ctrl.shift.alt.meta.exact` modifiers\r\n\r\n### What is expected?\r\nadded event listener successfully\r\n\r\n### What is actually happening?\r\n[Vue warn]: Failed to generate render function:\r\nSyntaxError: Unexpected token ')' in\r\n\r\n---\r\ncompiler generates `if()return null;` that's a syntax error.\r\n```javascript\r\n else if (key === 'exact') {\r\n var modifiers = (handler.modifiers);\r\n genModifierCode += genGuard(\r\n ['ctrl', 'shift', 'alt', 'meta']\r\n .filter(function (keyModifier) { return !modifiers[keyModifier]; })\r\n .map(function (keyModifier) { return (\"$event.\" + keyModifier + \"Key\"); })\r\n .join('||')\r\n );\r\n }\r\n```\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "This should be an easy fix in the compiler: avoiding adding that last `if` when the condition is empty", "created_at": "2021-10-11T15:47:46Z" } ], "number": 12319, "title": ".exact modifier : Failed to generate render function" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\nfix #12319\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 12321, "review_comments": [], "title": "fix(compiler): skip if guard when condition is empty after modifiers" }
{ "commits": [ { "message": "fix(compiler): do not add if guard when condition is empty" }, { "message": "fix(compiler): remove semicolon" }, { "message": "fix(compiler): add unit test" } ], "files": [ { "diff": "@@ -130,12 +130,11 @@ function genHandler (handler: ASTElementHandler | Array<ASTElementHandler>): str\n }\n } else if (key === 'exact') {\n const modifiers: ASTModifiers = (handler.modifiers: any)\n- genModifierCode += genGuard(\n- ['ctrl', 'shift', 'alt', 'meta']\n+ const condition = ['ctrl', 'shift', 'alt', 'meta']\n .filter(keyModifier => !modifiers[keyModifier])\n .map(keyModifier => `$event.${keyModifier}Key`)\n .join('||')\n- )\n+ if (condition) genModifierCode += genGuard(condition)\n } else {\n keys.push(key)\n }", "filename": "src/compiler/codegen/events.js", "status": "modified" }, { "diff": "@@ -430,6 +430,14 @@ describe('codegen', () => {\n )\n })\n \n+ // GitHub Issues #12319\n+ it('generate events with ctrl shift alt meta exact modifiers are applied together', () => {\n+ assertCodegen(\n+ '<button @keydown.ctrl.shift.alt.meta.exact=\"onClick\">Click</button>',\n+ `with(this){return _c('button',{on:{\"keydown\":function($event){if(!$event.ctrlKey)return null;if(!$event.shiftKey)return null;if(!$event.altKey)return null;if(!$event.metaKey)return null;return onClick.apply(null, arguments)}}},[_v(\"Click\")])}`\n+ )\n+ })\n+\n it('generate events with mouse event modifiers', () => {\n assertCodegen(\n '<input @click.ctrl=\"onClick\">',", "filename": "test/unit/modules/compiler/codegen.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.14\r\n\r\n### Reproduction link\r\n[https://github.com/hivokas/vue-2-slots-issue](https://github.com/hivokas/vue-2-slots-issue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n1. Clone the repo\r\n2. Run `npm run serve`\r\n3. Change the value in the text input\r\n\r\n### What is expected?\r\nPreview updates with both old and new slot syntax (here: https://github.com/hivokas/vue-2-slots-issue/blob/cd7f3a6db790472b88421a1497ae14d4135d23ef/src/components/PublishToSocialStep.vue#L4).\r\n\r\n### What is actually happening?\r\nPreview updates with old slot syntax, but doesn't update with new slot syntax.\r\n\r\nVideo demonstration of the problem:\r\n\r\nhttps://user-images.githubusercontent.com/22997803/130232152-bd5e445d-600e-4816-b140-685d1fd6c3a8.mp4\r\n\r\n\r\n\r\n---\r\nI've updated the old deprecated slot syntax to the new one.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Hey, thanks for the detailed report. On first sight this might rather be a bug with a special combination of slot usage. We'll investigate.", "created_at": "2021-08-20T12:47:57Z" }, { "body": "Online Reproduction link: https://jsbin.com/sakiwam/edit?html,output", "created_at": "2021-08-29T14:13:25Z" }, { "body": "> Hey, thanks for the detailed report. On first sight this might rather be a bug with a special combination of slot usage. We'll investigate.\r\n\r\nAs I investigated, the problem is inside `src/compiler/parser/index.js` inside `processSlotContent` method, I did not have enough time to fix it, but `rawAttrsMap` is doing wrong behaviour. I am new in Vue, so I just looked at it for 1 hour and it was my first time, can you guid me about this variable? ", "created_at": "2021-11-03T10:10:00Z" }, { "body": "We ran into this recently.\r\n\r\nWe are using different names and it still doesn't work with the `#` syntax.", "created_at": "2023-07-12T23:44:08Z" } ], "number": 12232, "title": "New slot syntax doesn't work the same way as old syntax (in some cases)" }
{ "body": "\r\n| | Before Fix | After Fix |\r\n|---|---|---|\r\n| Issue | Fix #12232 |\r\n| Reproduction Link | https://jsbin.com/sakiwam/edit?html,output | https://jsbin.com/rexakik/edit?html,output\r\n| Screenshot | ![07c1d1a2d9f45bbe4714417e951d86a](https://user-images.githubusercontent.com/14243906/132544129-a467ff7e-838a-46da-95a8-0838be77e7c8.png) | ![58e5456a8458ee88724aab0a8d54187](https://user-images.githubusercontent.com/14243906/132544158-96e33863-a3a6-4af3-a32a-ecc5b2779858.png) \r\n| Issue | Fix #12245 |\r\n| Reproduction Link | https://codesandbox.io/s/vue-2-x-reactivity-broken-tqogh | https://jsbin.com/gatanir/edit?html,output\r\n| Screenshot | ![edb6f72f5584eeb0c27612cc4de0bd7](https://user-images.githubusercontent.com/14243906/132544462-f8ab38f2-1450-4f36-988f-aba7dd53982b.png) | ![62643d55033a817bddbf8bbef0d46af](https://user-images.githubusercontent.com/14243906/132544489-bb472be5-8e1a-4833-982d-91e652d150b0.png) \r\n\r\n\r\n\r\n\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 12265, "review_comments": [], "title": "fix(compiler): nested scoped slot not update (fix #12232, #12245)" }
{ "commits": [ { "message": "fix(compiler): nested scoped slot not update (fix #12232, #12245)" } ], "files": [ { "diff": "@@ -430,7 +430,14 @@ function containsSlotChild (el: ASTNode): boolean {\n if (el.tag === 'slot') {\n return true\n }\n- return el.children.some(containsSlotChild)\n+ // #12232, #12245: nested scoped slot should update\n+ const childrenIsDynamic = el.children.some(containsSlotChild)\n+ if (childrenIsDynamic) {\n+ return childrenIsDynamic\n+ } else if (el.scopedSlots) {\n+ const scopedSlots = el.scopedSlots\n+ return Object.keys(scopedSlots).some(key => containsSlotChild(scopedSlots[key]))\n+ }\n }\n return false\n }", "filename": "src/compiler/codegen/index.js", "status": "modified" }, { "diff": "@@ -1026,6 +1026,72 @@ describe('Component scoped slot', () => {\n }).then(done)\n })\n \n+ // #12232, #12245\n+ it('nested named scoped slots should update', done => {\n+ const initialText = 'initial'\n+ const scopedSlotContent = 'scopedSlot'\n+\n+ const inner = {\n+ template: `<div><slot v-bind=\"{ user: '${scopedSlotContent}' }\"/></div>`\n+ }\n+\n+ const innerContainer = {\n+ template: `<div><slot name=\"content\"/></div>`\n+ }\n+\n+ const wrapper = {\n+ components: { inner, innerContainer },\n+ name: 'wrapper',\n+ template: `\n+ <inner v-slot=\"{ user }\">\n+ <innerContainer>\n+ <template #content>\n+ <div>\n+ <span>{{ user }}</span>\n+ <slot/>\n+ </div>\n+ </template>\n+ </innerContainer>\n+ </inner>\n+ `\n+ }\n+\n+ const outer = {\n+ components: { wrapper },\n+ template: `\n+ <wrapper>\n+ <form>\n+ <span>{{ text }}</span>\n+ <input v-model=\"text\" type=\"text\"/>\n+ </form>\n+ </wrapper>\n+ `,\n+ data() {\n+ return {\n+ text: initialText,\n+ }\n+ },\n+ }\n+\n+ const vm = new Vue({\n+ components: { outer },\n+ template: `<outer ref=\"outer\"></outer>`\n+ }).$mount()\n+\n+ expect(vm.$el.textContent).toBe(`${scopedSlotContent} ${initialText} `)\n+\n+ const newValue = 'newValue'\n+ vm.$refs.outer.text = newValue\n+ const input = vm.$el.querySelector('input')\n+ input.value = newValue\n+ triggerEvent(input, 'input')\n+\n+ waitForUpdate(() => {\n+ expect(vm.$el.textContent).toBe(`${scopedSlotContent} ${newValue} `)\n+ expect(input.value).toBe(newValue)\n+ }).then(done)\n+ })\n+\n it('dynamic v-bind arguments on <slot>', done => {\n const Foo = {\n data() {", "filename": "test/unit/features/component/component-scoped-slot.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.14\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/5w1ydeu8/3/](https://jsfiddle.net/5w1ydeu8/3/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n- Click between A and B => works\r\n- Click between A and C => works\r\n- Click between B and C => doesn't work\r\n\r\n### What is expected?\r\nThe scoped slot should update.\r\n\r\n### What is actually happening?\r\nThe scoped slot doesn't update.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Use `v-if` as a workaround:\r\n``` js\r\n <Foo v-if=\"frame === 'a'\">\r\n <template #default=\"x\">\r\n a\r\n </template>\r\n </Foo>\r\n <Foo v-if=\"frame === 'b'\">\r\n <template #default=\"x\">\r\n b\r\n </template>\r\n </Foo>\r\n <Foo v-if=\"frame === 'c'\">\r\n <template #default=\"x\">\r\n c\r\n </template>\r\n </Foo>\r\n```\r\n\r\nBut it does have something wrong with `v-else-if`, I'm looking into it.", "created_at": "2021-08-17T03:11:18Z" }, { "body": "@JuniorTour Thanks, I'm aware of various workarounds (like using `key`), but I thought this behavior was odd.", "created_at": "2021-08-17T03:23:22Z" } ], "number": 12223, "title": "v-if not patching component scoped slot correctly" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n\r\n| | Before Fix #12223 | After Fix #12223 |\r\n|---|---|---|\r\n| Reproduction Link | https://jsfiddle.net/5w1ydeu8/3/ | https://jsfiddle.net/juniortour/mkb698z4/ \r\n| Screenshot | ![image](https://user-images.githubusercontent.com/14243906/130981954-65719d5a-bf08-4ffa-a9db-3fb827ee2746.png) | ![LS8%J1U6XXBBBJ 4C QQ9O2](https://user-images.githubusercontent.com/14243906/130981793-9b321d38-de4f-419c-9f48-664b29840948.png) \r\n\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 12244, "review_comments": [], "title": "fix(compiler): scoped slot with v-else-if not update (fix #12223)" }
{ "commits": [ { "message": "fix(compiler): scoped slot with v-else-if not update (fix #12223)" } ], "files": [ { "diff": "@@ -377,11 +377,11 @@ function genScopedSlots (\n )\n })\n \n- // #9534: if a component with scoped slots is inside a conditional branch,\n+ // #9534, #12223: if a component with scoped slots is inside a conditional branch,\n // it's possible for the same component to be reused but with different\n // compiled slot content. To avoid that, we generate a unique key based on\n // the generated code of all the slot contents.\n- let needsKey = !!el.if\n+ let needsKey = !!(el.if || el.elseif)\n \n // OR when it is inside another scoped slot or v-for (the reactivity may be\n // disconnected due to the intermediate scope variable)", "filename": "src/compiler/codegen/index.js", "status": "modified" }, { "diff": "@@ -1349,4 +1349,41 @@ describe('Component scoped slot', () => {\n expect(parent.$el.textContent).toMatch(``)\n }).then(done)\n })\n+\n+ // #12223\n+ it('should work with multiple v-else-if', (done) => {\n+ const a = 'a'\n+ const b = 'b'\n+ const c = 'c'\n+\n+ const vm = new Vue({\n+ data: { frame: a },\n+ template: `\n+ <div>\n+ <test v-if=\"frame === '${a}'\">\n+ <template #default=\"x\">${a}</template>\n+ </test>\n+ <test v-else-if=\"frame === '${b}'\">\n+ <template #default=\"x\">${b}</template>\n+ </test>\n+ <test v-else-if=\"frame === '${c}'\">\n+ <template #default=\"x\">${c}</template>\n+ </test>\n+ </div>\n+ `,\n+ components: {\n+ Test: {\n+ template: '<div><slot/></div>'\n+ }\n+ }\n+ }).$mount()\n+\n+ vm.frame = b\n+ waitForUpdate(() => {\n+ expect(vm.$el.innerHTML).toBe(`<div>${b}</div>`)\n+ vm.frame = c\n+ }).then(() => {\n+ expect(vm.$el.innerHTML).toBe(`<div>${c}</div>`)\n+ }).then(done)\n+ })\n })", "filename": "test/unit/features/component/component-scoped-slot.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n\r\nhttps://jsfiddle.net/p0te9vL3/\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n1. Open dev tools to see console\r\n2. Cycle through the routes\r\n3. View console and observe lifecycle hook logging\r\n\r\n### What is expected?\r\nI'd expect once the `max` cache is reached (in the example case of 1), the component that gets pruned also gets destroyed. In [the documentation](https://vuejs.org/v2/api/#keep-alive) it states (emphasis mine): \r\n\r\n> `max`\r\n>\r\n> The maximum number of component instances to cache. Once this number is reached, the cached component instance that was least recently accessed **will be destroyed** before creating a new instance.\r\n\r\n### What is actually happening?\r\nThe pruned component is never destroyed, it's just removed from the cache. In the example, the `beforeDestroy` hook is never called.\r\n\r\n---\r\nRelated code: https://github.com/vuejs/vue/blob/636c9b4ef17f2062720b677cbbe613f146f4d4db/src/core/components/keep-alive.js#L37-L49\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "It seems not to destroy components when the max is one (in which case using keep-alive is pointless btw). I removed Vue Router from the repro to make it simpler: https://jsfiddle.net/p0te9vL3/ (max is set to 2 to show it works)\r\n\r\nI marking this as a bug for consistency. The workaround would be not using a `keep-alive` if the value for max is `1`", "created_at": "2019-05-04T10:52:21Z" }, { "body": "should we just validate max value just like this?\r\n```js\r\n if (parseInt(this.max) > 1) {\r\n var ref$1 = this;\r\n var cache = ref$1.cache;\r\n var keys = ref$1.keys;\r\n var key = vnode.key == null\r\n // same constructor may get registered as different local components\r\n // so cid alone is not enough (#3269)\r\n ? componentOptions.Ctor.cid + (componentOptions.tag ? (\"::\" + (componentOptions.tag)) : '')\r\n : vnode.key;\r\n if (cache[key]) {\r\n vnode.componentInstance = cache[key].componentInstance;\r\n // make current key freshest\r\n remove(keys, key);\r\n keys.push(key);\r\n } else {\r\n cache[key] = vnode;\r\n keys.push(key);\r\n // prune oldest entry\r\n if (this.max && keys.length > parseInt(this.max)) {\r\n pruneCacheEntry(cache, keys[0], keys, this._vnode);\r\n }\r\n }\r\n \r\n vnode.data.keepAlive = true;\r\n }\r\n```", "created_at": "2019-05-06T02:05:00Z" }, { "body": "@posva maybe I'm confused by the prop, but when I used `:max=\"1\"` I was expecting it to \"keep alive\" the previous component, so it'd always remember the trailing component but would destroy anything past 1", "created_at": "2019-05-06T16:48:40Z" }, { "body": "it will cache 1 component, so as soon as you switch the cache is occupied by the new component and the previous component is removed. A cache of one entry here is the same as not having any cache", "created_at": "2019-05-06T16:52:25Z" }, { "body": "@posva ah that makes more sense, thanks!", "created_at": "2019-05-06T16:59:52Z" }, { "body": "@posva, #9962 already fixes it, i just found out.", "created_at": "2019-05-14T04:01:07Z" }, { "body": "@zrh122 are you sure? I haven't tested but the pr https://github.com/vuejs/vue/pull/10015 introduced a very small change that does not appear in yours. In any case, it would be nice if we could merge both for different issues, so both contributions are taken into account 🙂 ", "created_at": "2019-05-14T12:58:51Z" }, { "body": "> @zrh122 are you sure? 
I haven't tested but the pr #10015 introduced a very small change that does not appear in yours. In any case, it would be nice if we could merge both for different issues, so both contributions are taken into account 🙂\r\n\r\nYes, in my code i delay to call `pruneCacheEntry` until component mounted or updated, at that time `this._vnode` is equal to variable `vnode`.", "created_at": "2019-05-14T15:09:56Z" }, { "body": "@posva I am using keep-alive to cache some expensive page, like the following.\r\n\r\n` <keep-alive max=\"1\">\r\n <router-view v-if=\"$route.meta.needCache\"></router-view>\r\n </keep-alive>\r\n <router-view v-if=\"!$route.meta.needCache\"></router-view>`\r\n\r\n\r\nHowever, I find out that there is no \"beforeDestroy\" hook for the cached file.\r\nI used \"deactivated hook\", but it seems that there is some minor memory leak. \r\n\r\n", "created_at": "2019-11-22T06:01:42Z" }, { "body": "I wonder if <keep-alive max=\"1\"> can clean up nodes thoroughly.... ", "created_at": "2019-11-22T06:06:07Z" }, { "body": "> It seems not to destroy components when the max is one (in which case using keep-alive is pointless btw). I removed Vue Router from the repro to make it simpler: https://jsfiddle.net/p0te9vL3/ (max is set to 2 to show it works)\r\n> \r\n> I marking this as a bug for consistency. The workaround would be not using a `keep-alive` if the value for max is `1`\r\n\r\n`<keep-alive max=\"1\"> <router-view v-if=\"$route.meta.needCache\"></router-view> </keep-alive> <router-view v-if=\"!$route.meta.needCache\"></router-view>`\r\nI use this to keep only one expensive page, so that user can switch from nonexpensive one to expensive one easily, but one can only cache one expensive page.\r\n", "created_at": "2019-11-22T06:09:46Z" }, { "body": "@posva I wonder if there are any other choices for my situation. ", "created_at": "2019-11-22T06:10:29Z" }, { "body": "> > @zrh122 are you sure? I haven't tested but the pr #10015 introduced a very small change that does not appear in yours. In any case, it would be nice if we could merge both for different issues, so both contributions are taken into account 🙂\r\n> \r\n> Yes, in my code i delay to call `pruneCacheEntry` until component mounted or updated, at that time `this._vnode` is equal to variable `vnode`.\r\n\r\nIt may cause other problem, such as last router will fresh when activated", "created_at": "2021-02-02T09:44:33Z" } ], "number": 9972, "title": "<keep-alive> doesn't `destroy` cached components after when max is 1" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [ ] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [ ] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [ ] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nfixes #9842\r\nFixes https://github.com/vuejs/vue-router/issues/2549\r\nFixes #9972", "number": 12015, "review_comments": [], "title": "fix(keep-alive): cache what is really needed not the whole VNode data" }
{ "commits": [ { "message": "refactor(keep-alive): cache what is really needed not the whole VNode data" }, { "message": "refactor: use method instead of creating a new one per render" } ], "files": [ { "diff": "@@ -3,7 +3,13 @@\n import { isRegExp, remove } from 'shared/util'\n import { getFirstComponentChild } from 'core/vdom/helpers/index'\n \n-type VNodeCache = { [key: string]: ?VNode };\n+type CacheEntry = {\n+ name: ?string;\n+ tag: ?string;\n+ componentInstance: Component;\n+};\n+\n+type CacheEntryMap = { [key: string]: ?CacheEntry };\n \n function getComponentName (opts: ?VNodeComponentOptions): ?string {\n return opts && (opts.Ctor.options.name || opts.tag)\n@@ -24,9 +30,9 @@ function matches (pattern: string | RegExp | Array<string>, name: string): boole\n function pruneCache (keepAliveInstance: any, filter: Function) {\n const { cache, keys, _vnode } = keepAliveInstance\n for (const key in cache) {\n- const cachedNode: ?VNode = cache[key]\n- if (cachedNode) {\n- const name: ?string = getComponentName(cachedNode.componentOptions)\n+ const entry: ?CacheEntry = cache[key]\n+ if (entry) {\n+ const name: ?string = entry.name\n if (name && !filter(name)) {\n pruneCacheEntry(cache, key, keys, _vnode)\n }\n@@ -35,14 +41,14 @@ function pruneCache (keepAliveInstance: any, filter: Function) {\n }\n \n function pruneCacheEntry (\n- cache: VNodeCache,\n+ cache: CacheEntryMap,\n key: string,\n keys: Array<string>,\n current?: VNode\n ) {\n- const cached = cache[key]\n- if (cached && (!current || cached.tag !== current.tag)) {\n- cached.componentInstance.$destroy()\n+ const entry: ?CacheEntry = cache[key]\n+ if (entry && (!current || entry.tag !== current.tag)) {\n+ entry.componentInstance.$destroy()\n }\n cache[key] = null\n remove(keys, key)\n@@ -60,6 +66,26 @@ export default {\n max: [String, Number]\n },\n \n+ methods: {\n+ cacheVNode() {\n+ const { cache, keys, vnodeToCache, keyToCache } = this\n+ if (vnodeToCache) {\n+ const { tag, componentInstance, componentOptions } = vnodeToCache\n+ cache[keyToCache] = {\n+ name: getComponentName(componentOptions),\n+ tag,\n+ componentInstance,\n+ }\n+ keys.push(keyToCache)\n+ // prune oldest entry\n+ if (this.max && keys.length > parseInt(this.max)) {\n+ pruneCacheEntry(cache, keys[0], keys, this._vnode)\n+ }\n+ this.vnodeToCache = null\n+ }\n+ }\n+ },\n+\n created () {\n this.cache = Object.create(null)\n this.keys = []\n@@ -72,6 +98,7 @@ export default {\n },\n \n mounted () {\n+ this.cacheVNode()\n this.$watch('include', val => {\n pruneCache(this, name => matches(val, name))\n })\n@@ -80,6 +107,10 @@ export default {\n })\n },\n \n+ updated () {\n+ this.cacheVNode()\n+ },\n+\n render () {\n const slot = this.$slots.default\n const vnode: VNode = getFirstComponentChild(slot)\n@@ -109,12 +140,9 @@ export default {\n remove(keys, key)\n keys.push(key)\n } else {\n- cache[key] = vnode\n- keys.push(key)\n- // prune oldest entry\n- if (this.max && keys.length > parseInt(this.max)) {\n- pruneCacheEntry(cache, keys[0], keys, this._vnode)\n- }\n+ // delay setting the cache until update\n+ this.vnodeToCache = vnode\n+ this.keyToCache = key\n }\n \n vnode.data.keepAlive = true", "filename": "src/core/components/keep-alive.js", "status": "modified" }, { "diff": "@@ -572,6 +572,73 @@ describe('Component keep-alive', () => {\n }).then(done)\n })\n \n+ it('max=1', done => {\n+ const spyA = jasmine.createSpy()\n+ const spyB = jasmine.createSpy()\n+ const spyC = jasmine.createSpy()\n+ const spyAD = jasmine.createSpy()\n+ const spyBD = jasmine.createSpy()\n+ 
const spyCD = jasmine.createSpy()\n+\n+ function assertCount (calls) {\n+ expect([\n+ spyA.calls.count(),\n+ spyAD.calls.count(),\n+ spyB.calls.count(),\n+ spyBD.calls.count(),\n+ spyC.calls.count(),\n+ spyCD.calls.count()\n+ ]).toEqual(calls)\n+ }\n+\n+ const vm = new Vue({\n+ template: `\n+ <keep-alive max=\"1\">\n+ <component :is=\"n\"></component>\n+ </keep-alive>\n+ `,\n+ data: {\n+ n: 'aa'\n+ },\n+ components: {\n+ aa: {\n+ template: '<div>a</div>',\n+ created: spyA,\n+ destroyed: spyAD\n+ },\n+ bb: {\n+ template: '<div>bbb</div>',\n+ created: spyB,\n+ destroyed: spyBD\n+ },\n+ cc: {\n+ template: '<div>ccc</div>',\n+ created: spyC,\n+ destroyed: spyCD\n+ }\n+ }\n+ }).$mount()\n+\n+ assertCount([1, 0, 0, 0, 0, 0])\n+ vm.n = 'bb'\n+ waitForUpdate(() => {\n+ // should prune A because max cache reached\n+ assertCount([1, 1, 1, 0, 0, 0])\n+ vm.n = 'cc'\n+ }).then(() => {\n+ // should prune B because max cache reached\n+ assertCount([1, 1, 1, 1, 1, 0])\n+ vm.n = 'bb'\n+ }).then(() => {\n+ // B is recreated\n+ assertCount([1, 1, 2, 1, 1, 1])\n+ vm.n = 'aa'\n+ }).then(() => {\n+ // B is destroyed and A recreated\n+ assertCount([2, 1, 2, 2, 1, 1])\n+ }).then(done)\n+ })\n+\n it('should warn unknown component inside', () => {\n new Vue({\n template: `<keep-alive><foo/></keep-alive>`", "filename": "test/unit/features/component/component-keep-alive.spec.js", "status": "modified" } ] }
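A runnable sketch of the scenario exercised by the new `max=1` test, assuming Vue 2 is loaded; the component names are illustrative. Because caching is now delayed to `mounted`/`updated`, `this._vnode` already points at the newly rendered component when `pruneCacheEntry` compares tags, so the pruned instance really is destroyed.

```js
const vm = new Vue({
  data: { view: 'aa' },
  components: {
    aa: { template: '<div>a</div>', destroyed () { console.log('aa destroyed') } },
    bb: { template: '<div>b</div>', destroyed () { console.log('bb destroyed') } }
  },
  template: `
    <keep-alive max="1">
      <component :is="view"></component>
    </keep-alive>
  `
}).$mount()

vm.view = 'bb'
// After this update "aa destroyed" should be logged: the cache entry for "aa"
// is pruned (and its instance destroyed) once "bb" has actually been rendered.
```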
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://codepen.io/aaronbird/pen/JVRKwa](https://codepen.io/aaronbird/pen/JVRKwa)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nI wrote a demo: \r\n```html\r\n<!DOCTYPE html>\r\n<html lang=\"en\">\r\n <head>\r\n <meta charset=\"UTF-8\" />\r\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\r\n <meta http-equiv=\"X-UA-Compatible\" content=\"ie=edge\" />\r\n <title>demo</title>\r\n <style>\r\n .link {\r\n cursor: pointer;\r\n user-select: none;\r\n }\r\n </style>\r\n </head>\r\n <body>\r\n <script src=\"https://unpkg.com/vue/dist/vue.js\"></script>\r\n\r\n <div id=\"app\">\r\n <transition>\r\n <keep-alive include=\"foo\">\r\n <component :is=\"target\" @change=\"onChange\"></component>\r\n </keep-alive>\r\n </transition>\r\n </div>\r\n <script>\r\n const Foo = {\r\n name: \"foo\",\r\n template: `<div class=\"link\" @click=\"$emit('change', 'bar')\">Go to Bar</div>`\r\n };\r\n const Bar = {\r\n name: \"bar\",\r\n template: `<div class=\"link\" @click=\"$emit('change', 'foo')\">Go to Foo</div>`\r\n };\r\n Vue.component(\"foo\", Foo);\r\n Vue.component(\"bar\", Bar);\r\n const app = new Vue({\r\n data: {\r\n target: \"foo\"\r\n },\r\n methods: {\r\n onChange(target) {\r\n this.target = target;\r\n }\r\n }\r\n }).$mount(\"#app\");\r\n </script>\r\n </body>\r\n</html>\r\n```\r\n\r\nThe debugging process is as follows: \r\n1 Click \"Collect garbage\" and \"Take heap snapshot\"(snapshot 1) \r\n2 Click the routing button on the left 10 times, then click \"Collect garbage\" and \"Take heap snapshot\"(snapshot 2) \r\n3 Click the routing button on the left 10 times, then click \"Collect garbage\" and \"Take heap snapshot\"(snapshot 3) \r\n![](https://i.imgur.com/FCe40P6.gif) \r\n\r\nThe generated \"snapshot\" is as follows:\r\n\r\nsnapshot 1:\r\n![](https://i.imgur.com/S49oSJo.png) \r\nsnapshot 2:\r\n![](https://i.imgur.com/X1bbOgU.png) \r\nsnapshot 3:\r\n![](https://i.imgur.com/lLD8z7Y.png)\r\nAfter clicking 100 times: \r\n![](https://i.imgur.com/E0bjppJ.png) \r\n\r\nVueComponent is not being recycled, is this a bug? \r\n\r\n### What is expected?\r\nWhy is this happening, is this a bug?\r\n\r\n### What is actually happening?\r\nComponent will not be cleared when switching components.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I will take a look this.", "created_at": "2019-04-10T05:36:52Z" }, { "body": "All component is retained by cached vnode’s parent,if we delete parent. then components are recycled.Will dig more", "created_at": "2019-04-13T07:53:16Z" }, { "body": "I also have a memory leak issue, please see : https://github.com/bootstrap-vue/bootstrap-vue/issues/4214\r\n\r\nI tried to make a jsfiddle but not sure how to replicate the caching behaviour.", "created_at": "2019-10-08T10:19:23Z" }, { "body": "Hey, is there any news? is this updates already merged to master branch ? I have the same problem.", "created_at": "2020-01-06T15:05:41Z" }, { "body": "I'd really love to see any update on this issue - there is PR but it seems dead?", "created_at": "2020-01-19T21:57:10Z" }, { "body": "It didn't fixed. Oh m g.", "created_at": "2023-03-02T08:20:51Z" } ], "number": 9842, "title": "Memory leak when using \"transition\" and \"keep-alive\"" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [ ] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [ ] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [ ] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nfixes #9842\r\nFixes https://github.com/vuejs/vue-router/issues/2549\r\nFixes #9972", "number": 12015, "review_comments": [], "title": "fix(keep-alive): cache what is really needed not the whole VNode data" }
{ "commits": [ { "message": "refactor(keep-alive): cache what is really needed not the whole VNode data" }, { "message": "refactor: use method instead of creating a new one per render" } ], "files": [ { "diff": "@@ -3,7 +3,13 @@\n import { isRegExp, remove } from 'shared/util'\n import { getFirstComponentChild } from 'core/vdom/helpers/index'\n \n-type VNodeCache = { [key: string]: ?VNode };\n+type CacheEntry = {\n+ name: ?string;\n+ tag: ?string;\n+ componentInstance: Component;\n+};\n+\n+type CacheEntryMap = { [key: string]: ?CacheEntry };\n \n function getComponentName (opts: ?VNodeComponentOptions): ?string {\n return opts && (opts.Ctor.options.name || opts.tag)\n@@ -24,9 +30,9 @@ function matches (pattern: string | RegExp | Array<string>, name: string): boole\n function pruneCache (keepAliveInstance: any, filter: Function) {\n const { cache, keys, _vnode } = keepAliveInstance\n for (const key in cache) {\n- const cachedNode: ?VNode = cache[key]\n- if (cachedNode) {\n- const name: ?string = getComponentName(cachedNode.componentOptions)\n+ const entry: ?CacheEntry = cache[key]\n+ if (entry) {\n+ const name: ?string = entry.name\n if (name && !filter(name)) {\n pruneCacheEntry(cache, key, keys, _vnode)\n }\n@@ -35,14 +41,14 @@ function pruneCache (keepAliveInstance: any, filter: Function) {\n }\n \n function pruneCacheEntry (\n- cache: VNodeCache,\n+ cache: CacheEntryMap,\n key: string,\n keys: Array<string>,\n current?: VNode\n ) {\n- const cached = cache[key]\n- if (cached && (!current || cached.tag !== current.tag)) {\n- cached.componentInstance.$destroy()\n+ const entry: ?CacheEntry = cache[key]\n+ if (entry && (!current || entry.tag !== current.tag)) {\n+ entry.componentInstance.$destroy()\n }\n cache[key] = null\n remove(keys, key)\n@@ -60,6 +66,26 @@ export default {\n max: [String, Number]\n },\n \n+ methods: {\n+ cacheVNode() {\n+ const { cache, keys, vnodeToCache, keyToCache } = this\n+ if (vnodeToCache) {\n+ const { tag, componentInstance, componentOptions } = vnodeToCache\n+ cache[keyToCache] = {\n+ name: getComponentName(componentOptions),\n+ tag,\n+ componentInstance,\n+ }\n+ keys.push(keyToCache)\n+ // prune oldest entry\n+ if (this.max && keys.length > parseInt(this.max)) {\n+ pruneCacheEntry(cache, keys[0], keys, this._vnode)\n+ }\n+ this.vnodeToCache = null\n+ }\n+ }\n+ },\n+\n created () {\n this.cache = Object.create(null)\n this.keys = []\n@@ -72,6 +98,7 @@ export default {\n },\n \n mounted () {\n+ this.cacheVNode()\n this.$watch('include', val => {\n pruneCache(this, name => matches(val, name))\n })\n@@ -80,6 +107,10 @@ export default {\n })\n },\n \n+ updated () {\n+ this.cacheVNode()\n+ },\n+\n render () {\n const slot = this.$slots.default\n const vnode: VNode = getFirstComponentChild(slot)\n@@ -109,12 +140,9 @@ export default {\n remove(keys, key)\n keys.push(key)\n } else {\n- cache[key] = vnode\n- keys.push(key)\n- // prune oldest entry\n- if (this.max && keys.length > parseInt(this.max)) {\n- pruneCacheEntry(cache, keys[0], keys, this._vnode)\n- }\n+ // delay setting the cache until update\n+ this.vnodeToCache = vnode\n+ this.keyToCache = key\n }\n \n vnode.data.keepAlive = true", "filename": "src/core/components/keep-alive.js", "status": "modified" }, { "diff": "@@ -572,6 +572,73 @@ describe('Component keep-alive', () => {\n }).then(done)\n })\n \n+ it('max=1', done => {\n+ const spyA = jasmine.createSpy()\n+ const spyB = jasmine.createSpy()\n+ const spyC = jasmine.createSpy()\n+ const spyAD = jasmine.createSpy()\n+ const spyBD = jasmine.createSpy()\n+ 
const spyCD = jasmine.createSpy()\n+\n+ function assertCount (calls) {\n+ expect([\n+ spyA.calls.count(),\n+ spyAD.calls.count(),\n+ spyB.calls.count(),\n+ spyBD.calls.count(),\n+ spyC.calls.count(),\n+ spyCD.calls.count()\n+ ]).toEqual(calls)\n+ }\n+\n+ const vm = new Vue({\n+ template: `\n+ <keep-alive max=\"1\">\n+ <component :is=\"n\"></component>\n+ </keep-alive>\n+ `,\n+ data: {\n+ n: 'aa'\n+ },\n+ components: {\n+ aa: {\n+ template: '<div>a</div>',\n+ created: spyA,\n+ destroyed: spyAD\n+ },\n+ bb: {\n+ template: '<div>bbb</div>',\n+ created: spyB,\n+ destroyed: spyBD\n+ },\n+ cc: {\n+ template: '<div>ccc</div>',\n+ created: spyC,\n+ destroyed: spyCD\n+ }\n+ }\n+ }).$mount()\n+\n+ assertCount([1, 0, 0, 0, 0, 0])\n+ vm.n = 'bb'\n+ waitForUpdate(() => {\n+ // should prune A because max cache reached\n+ assertCount([1, 1, 1, 0, 0, 0])\n+ vm.n = 'cc'\n+ }).then(() => {\n+ // should prune B because max cache reached\n+ assertCount([1, 1, 1, 1, 1, 0])\n+ vm.n = 'bb'\n+ }).then(() => {\n+ // B is recreated\n+ assertCount([1, 1, 2, 1, 1, 1])\n+ vm.n = 'aa'\n+ }).then(() => {\n+ // B is destroyed and A recreated\n+ assertCount([2, 1, 2, 2, 1, 1])\n+ }).then(done)\n+ })\n+\n it('should warn unknown component inside', () => {\n new Vue({\n template: `<keep-alive><foo/></keep-alive>`", "filename": "test/unit/features/component/component-keep-alive.spec.js", "status": "modified" } ] }
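For the memory-leak report above, the relevant part of the same PR is that the cache now stores only `name`, `tag` and `componentInstance` instead of the whole VNode. A plain-object sketch of why that matters — these are stand-in objects, not Vue internals, and all names are illustrative:

```js
// A VNode keeps references such as `parent` and `context`, which retain entire
// detached component trees for as long as the cache holds the vnode.
const detachedTree = { /* stands in for a large detached vnode/component tree */ }

const fakeVNode = {
  tag: 'vue-component-1-foo',
  componentInstance: { $destroy () {} },
  componentOptions: { Ctor: { options: { name: 'foo' } }, tag: 'foo' },
  parent: detachedTree,  // retained while the whole vnode is cached
  context: detachedTree
}

// Before the fix: cache[key] = fakeVNode  -> detachedTree stays reachable.
// After the fix: only what pruning and reuse need is kept, so the rest can be
// garbage collected once the vnode itself goes out of scope.
const cacheEntry = {
  name: fakeVNode.componentOptions.Ctor.options.name,
  tag: fakeVNode.tag,
  componentInstance: fakeVNode.componentInstance
}
```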
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/kk445knpx3](https://codesandbox.io/s/kk445knpx3)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nIn the `Child.vue` we have a slot with a default slot content. In the `Parent.vue` we override the default content of the child. \r\n\r\n### What is expected?\r\nIMO the content of the slot in child component should not be evaluated since the content provided from the parent will be used.\r\n\r\n### What is actually happening?\r\nThe content of the parent it renders which is what I expect.\r\n\r\nThe default content of the `Child.vue` it's evaluated even if it not renders. That's why you see in the console: `Cannot read property 'length' of null`\r\n\r\n---\r\nMaybe that's not a bug and instead it's the intentional behavior, but it doesn't make sense to me, to evaluate something that will not render.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I would say this is a bug as we should be able to pass a function to the slot function (`_t`) so the default slot content is only evaluated if no slot is provided.\r\n\r\nI thought a workaround would be using a `v-for` to make it lazy, but it doesn't work", "created_at": "2019-07-03T09:32:40Z" }, { "body": "Thanks for the quick response @posva . ", "created_at": "2019-07-03T09:37:33Z" } ], "number": 10224, "title": "Default slot content should not be evaluated when the parent is providing it" }
{ "body": "slot fallback content should be evaluated only when the parent is not providing it.<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [ ] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [ ] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [ ] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n\r\nfixes #10224", "number": 12014, "review_comments": [], "title": "fix(slot): add a function to return the slot fallback content" }
{ "commits": [ { "message": "fix(slot): add a function to return the slot fallback content\n\nslot fallback content should be evaluated only when the parent is not providing it." }, { "message": "refactor: retro compatible render functions" } ], "files": [ { "diff": "@@ -547,7 +547,7 @@ export function genComment (comment: ASTText): string {\n function genSlot (el: ASTElement, state: CodegenState): string {\n const slotName = el.slotName || '\"default\"'\n const children = genChildren(el, state)\n- let res = `_t(${slotName}${children ? `,${children}` : ''}`\n+ let res = `_t(${slotName}${children ? `,function(){return ${children}}` : ''}`\n const attrs = el.attrs || el.dynamicAttrs\n ? genProps((el.attrs || []).concat(el.dynamicAttrs || []).map(attr => ({\n // slot props are camelized", "filename": "src/compiler/codegen/index.js", "status": "modified" }, { "diff": "@@ -7,26 +7,30 @@ import { extend, warn, isObject } from 'core/util/index'\n */\n export function renderSlot (\n name: string,\n- fallback: ?Array<VNode>,\n+ fallbackRender: ?((() => Array<VNode>) | Array<VNode>),\n props: ?Object,\n bindObject: ?Object\n ): ?Array<VNode> {\n const scopedSlotFn = this.$scopedSlots[name]\n let nodes\n- if (scopedSlotFn) { // scoped slot\n+ if (scopedSlotFn) {\n+ // scoped slot\n props = props || {}\n if (bindObject) {\n if (process.env.NODE_ENV !== 'production' && !isObject(bindObject)) {\n- warn(\n- 'slot v-bind without argument expects an Object',\n- this\n- )\n+ warn('slot v-bind without argument expects an Object', this)\n }\n props = extend(extend({}, bindObject), props)\n }\n- nodes = scopedSlotFn(props) || fallback\n+ nodes =\n+ scopedSlotFn(props) ||\n+ (fallbackRender &&\n+ (Array.isArray(fallbackRender) ? fallbackRender : fallbackRender()))\n } else {\n- nodes = this.$slots[name] || fallback\n+ nodes =\n+ this.$slots[name] ||\n+ (fallbackRender &&\n+ (Array.isArray(fallbackRender) ? 
fallbackRender : fallbackRender()))\n }\n \n const target = props && props.slot", "filename": "src/core/instance/render-helpers/render-slot.js", "status": "modified" }, { "diff": "@@ -109,6 +109,47 @@ describe('Component slot', () => {\n expect(child.$el.children[1].textContent).toBe('slot b')\n })\n \n+ it('it should work with previous versions of the templates', () => {\n+ const Test = {\n+ render() {\n+ var _vm = this;\n+ var _h = _vm.$createElement;\n+ var _c = _vm._self._c || vm._h;\n+ return _c('div', [_vm._t(\"default\", [_c('p', [_vm._v(\"slot default\")])])], 2)\n+ }\n+ }\n+ let vm = new Vue({\n+ template: `<test/>`,\n+ components: { Test }\n+ }).$mount()\n+ expect(vm.$el.textContent).toBe('slot default')\n+ vm = new Vue({\n+ template: `<test>custom content</test>`,\n+ components: { Test }\n+ }).$mount()\n+ expect(vm.$el.textContent).toBe('custom content')\n+ })\n+\n+ it('fallback content should not be evaluated when the parent is providing it', () => {\n+ const test = jasmine.createSpy('test')\n+ const vm = new Vue({\n+ template: '<test>slot default</test>',\n+ components: {\n+ test: {\n+ template: '<div><slot>{{test()}}</slot></div>',\n+ methods: {\n+ test () {\n+ test()\n+ return 'test'\n+ }\n+ }\n+ }\n+ }\n+ }).$mount()\n+ expect(vm.$el.textContent).toBe('slot default')\n+ expect(test).not.toHaveBeenCalled()\n+ })\n+\n it('selector matching multiple elements', () => {\n mount({\n childTemplate: '<div><slot name=\"t\"></slot></div>',", "filename": "test/unit/features/component/component-slot.spec.js", "status": "modified" }, { "diff": "@@ -196,7 +196,7 @@ describe('codegen', () => {\n it('generate slot fallback content', () => {\n assertCodegen(\n '<div><slot><div>hi</div></slot></div>',\n- `with(this){return _c('div',[_t(\"default\",[_c('div',[_v(\"hi\")])])],2)}`\n+ `with(this){return _c('div',[_t(\"default\",function(){return [_c('div',[_v(\"hi\")])]})],2)}`\n )\n })\n ", "filename": "test/unit/modules/compiler/codegen.spec.js", "status": "modified" } ] }
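The generated code now wraps slot fallback content in a thunk (`_t("default", function () { return [...] })`), so it is only evaluated when the parent provides nothing. A small sketch mirroring the PR's own test, assuming Vue 2 is loaded; `track` is an illustrative method name:

```js
const evaluated = []

const Child = {
  template: '<div><slot>{{ track() }}</slot></div>',
  methods: {
    track () {
      evaluated.push('fallback rendered') // only runs when no slot content is passed in
      return 'default content'
    }
  }
}

const vm = new Vue({
  components: { Child },
  template: '<child>provided content</child>'
}).$mount()

console.log(vm.$el.textContent) // "provided content"
console.log(evaluated.length)   // 0 with the fix; 1 before it, because the fallback was evaluated eagerly
```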
{ "body": "### Version\r\n2.6.12\r\n\r\n### Reproduction link\r\n[https://codepen.io/jiankafei/pen/jOqZdpG](https://codepen.io/jiankafei/pen/jOqZdpG)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n\r\nClick the switch button, it will switch between two components with slots and without slots. The components without slots should not appear the slots passed to the components with slots\r\n\r\n### What is expected?\r\n\r\nShould display the fallback slot\r\n\r\n### What is actually happening?\r\n\r\nDisplays the previous slot\r\n\r\n---\r\n\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "https://codepen.io/jkarczm/pen/WNwMmZy", "created_at": "2020-09-09T08:03:39Z" }, { "body": "The workaround is to use a `key` on `alert-box` so Vue doesn't reuse it", "created_at": "2020-09-09T08:06:49Z" }, { "body": "@jacekkarczmarczyk @posva 三克油", "created_at": "2020-09-09T08:29:05Z" }, { "body": "hmm, this bug should be fixed, if yes, can i take this? :) ", "created_at": "2020-09-10T11:18:41Z" }, { "body": "as long as no PR has been submitted anybody can take any bug and give it a try 🙂 ", "created_at": "2020-09-10T11:43:23Z" } ], "number": 11652, "title": "slot is reused with v-if/v-else" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nfix #11652", "number": 11795, "review_comments": [], "title": "fix: force update between two components with and without slot" }
{ "commits": [ { "message": "fix(test): npm test fails on Windows\nclose vuejs#11782" }, { "message": "chore: Merge remote-tracking branch 'upstream/dev' into dev" }, { "message": "fix(slot): force update when swtching between two components with slot\nand without slot\nclose #11652" } ], "files": [ { "diff": "@@ -234,7 +234,8 @@ export function updateChildComponent (\n const hasDynamicScopedSlot = !!(\n (newScopedSlots && !newScopedSlots.$stable) ||\n (oldScopedSlots !== emptyObject && !oldScopedSlots.$stable) ||\n- (newScopedSlots && vm.$scopedSlots.$key !== newScopedSlots.$key)\n+ (newScopedSlots && vm.$scopedSlots.$key !== newScopedSlots.$key) ||\n+ (!newScopedSlots && vm.$scopedSlots.$key)\n )\n \n // Any static slot children from the parent may have changed during parent's", "filename": "src/core/instance/lifecycle.js", "status": "modified" }, { "diff": "@@ -1325,4 +1325,28 @@ describe('Component scoped slot', () => {\n expect(vm.$el.textContent).toMatch(`1`)\n }).then(done)\n })\n+\n+ // #11652\n+ it('should update when swtching between two components with slot and without slot', done => {\n+ const Child = {\n+ template: `<div><slot/></div>`\n+ }\n+\n+ const parent = new Vue({\n+ template: `<div>\n+ <child v-if=\"flag\"><template #default>foo</template></child>\n+ <child v-else></child>\n+ </div>`,\n+ data: {\n+ flag: true\n+ },\n+ components: { Child }\n+ }).$mount()\n+\n+ expect(parent.$el.textContent).toMatch(`foo`)\n+ parent.flag=false\n+ waitForUpdate(()=>{\n+ expect(parent.$el.textContent).toMatch(``)\n+ }).then(done)\n+ })\n })", "filename": "test/unit/features/component/component-scoped-slot.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://codepen.io/kaelwd/pen/rNLNYGL?editors=1011](https://codepen.io/kaelwd/pen/rNLNYGL?editors=1011)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nOpen the console\r\n\r\n### What is expected?\r\n`\"ScopedComponent beforeMount\" true` to be logged\r\n\r\n### What is actually happening?\r\n`\"ScopedComponent beforeMount\" false`\r\n\r\n---\r\nThis was discovered in vuetify: https://github.com/vuetifyjs/vuetify/issues/11533#issuecomment-705058227\r\n\r\nThe real setup is a watcher on a computed property that depends on `$slots`. The watcher is called before mount to get an initial value, causing the computed property to be executed and return an incorrect value. \r\n\r\nMinimal example with the watcher if that helps: https://codepen.io/kaelwd/pen/vYKYWbV?editors=1010\r\n\r\nThis works as expected in vue 3: https://codepen.io/kaelwd/pen/gOMOXoB?editors=1011\r\n\r\nThe bug seems to have existed since scoped slots were introduced in v2.1, but it wasn't a problem here until v2.6 when scoped slots started being used everywhere with `v-slot`\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Hello, I fix this bug in PR #11726 .\r\nIn before mount, Vue2 will always create an empty object when init render. And in this PR, it will normalize scoped slots when parent vnode is not undefined.", "created_at": "2020-10-17T12:59:39Z" } ], "number": 11714, "title": "$scopedSlots is empty until render()" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nClose #11714 ", "number": 11726, "review_comments": [], "title": "fix(render): $scopedSlots shoud not empty in beforeMount (#11714)" }
{ "commits": [ { "message": "fix(render): $scopedSlots shoud not empty in beforeMount (#11714)" } ], "files": [ { "diff": "@@ -23,7 +23,11 @@ export function initRender (vm: Component) {\n const parentVnode = vm.$vnode = options._parentVnode // the placeholder node in parent tree\n const renderContext = parentVnode && parentVnode.context\n vm.$slots = resolveSlots(options._renderChildren, renderContext)\n- vm.$scopedSlots = emptyObject\n+ vm.$scopedSlots = vm.$options._parentVnode ? normalizeScopedSlots(\n+ vm.$options._parentVnode.data.scopedSlots,\n+ vm.$slots,\n+ vm.$scopedSlots\n+ ) : emptyObject\n // bind the createElement fn to this instance\n // so that we get proper render context inside it.\n // args order: tag, data, children, normalizationType, alwaysNormalize", "filename": "src/core/instance/render.js", "status": "modified" }, { "diff": "@@ -1325,4 +1325,19 @@ describe('Component scoped slot', () => {\n expect(vm.$el.textContent).toMatch(`1`)\n }).then(done)\n })\n+\n+ // vm.$scopedSlots shoud not empty in beforeMount, see issue #11714\n+ it('$scopedSlots shoud not empty in beforeMount', () => {\n+ const vm = new Vue({\n+ template: `<foo>Slot content</foo>`,\n+ components: {\n+ foo: {\n+ template: `<div><slot></slot></div>`,\n+ beforeMount () {\n+ expect(Object.keys(this.$scopedSlots).length).not.toBe(0)\n+ }\n+ }\n+ }\n+ }).$mount()\n+ })\n })", "filename": "test/unit/features/component/component-scoped-slot.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.17\r\n\r\n\r\n### Reproduction link\r\nhttps://codepen.io/avertes/pen/LYYpNRe\r\n[https://jsfiddle.net/50wL7mdz/756973/](https://jsfiddle.net/50wL7mdz/756973/)\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n1. Create a new component that accepts a string prop.\r\n1. Display the prop within the component's template.\r\n1. Use the component in a Vue application and pass a string containing a non-breaking space character for the prop.\r\n\r\n### What is expected?\r\nThe output should contain a non breaking space\r\n\r\n### What is actually happening?\r\nThe output shows `&nbsp;`\r\n\r\n---\r\nIn the example provided I've made 3 cases\r\n\r\n- First case is that `&#160;` get turned into `&nbsp;`\r\n- Second case is that in a long list of UTF-8 characters only `NON-BREAKING SPACE` is escaped.\r\n- And third when getting the same list of characters, but retrieving it from a regular `HTMLElement` with `document.querySelector('#test').title` the character aren't escape.\r\n\r\n\r\n**Note**: When copying the non-breaking space character it might turn into a regular space in the clipboard. Therefor use https://en.wikipedia.org/wiki/Non-breaking_space#Keyboard_entry_methods to make sure how to insert the character.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I've added a new example - where in a list of html entities (including other whitespace and zero width characters) - only `&nbsp;` not decoded.\r\n\r\n- https://jsfiddle.net/50wL7mdz/765399/\r\n\r\nAlso I've noticed that in [mathiasbynens/he](https://github.com/mathiasbynens/he/) there are two entries for `&nbsp;` one without simicolon and one with simicolon. \r\n\r\n- https://github.com/mathiasbynens/he/blob/master/data/entities.json#L1470\r\n\r\nMight be a shot in the dark, but could this have any influence on the result above?", "created_at": "2018-10-17T08:27:38Z" }, { "body": "I've tried using [mathiasbynens/he](https://github.com/mathiasbynens/he/) v1.1.1 to find any inconsistencies with `&nbsp;`, but haven't been able to find any.\r\n\r\nI've tried duplicating my previous jsfiddles, to see if `&nbsp;` would react any different.\r\n- https://jsfiddle.net/ebpcx1do/3/\r\n\r\nIf anyone can give me some pointers for where to look for this error, I'm more than happy to give it a shot.\r\n\r\nI would also very much appreciate any response on issue.\r\n\r\n", "created_at": "2018-10-29T14:05:05Z" }, { "body": "Looks like a bug to me - a fix would be nice ", "created_at": "2018-10-30T09:25:46Z" }, { "body": "Ive noticed issues with `&times;` on occasion. say a component prop has a default of `&times;`, which is rendered in the component (v-html). 
When rendering it shows the actual `x` character and causes an SSR hydration bail.", "created_at": "2018-11-02T22:24:52Z" }, { "body": "Other example: https://jsfiddle.net/onbzk0m6/ (character &nbsp;)", "created_at": "2018-11-23T10:34:19Z" }, { "body": "Yeah - it does seem to be related the way Vue parses html attributes in general.\r\n\r\nI've made a similar example based of @approached example\r\nhttps://jsfiddle.net/onbzk0m6/3/\r\n\r\n\r\n\r\n\r\n", "created_at": "2018-11-23T13:08:56Z" }, { "body": "as a workaround, try to put the JS escape code for `&nbsp;` - `\\xa0`\r\n\r\nfound [here](https://github.com/kazupon/vue-i18n/issues/318#issuecomment-380103519), worked for me in nonbreakinspacification function\r\n```\r\n public static noBreakingSpaces(str: string): string {\r\n return str.replace(' ', '\\xa0');\r\n }\r\n```\r\nstrings returned by this function are being rendered with `&nbsp`s instead of spaces.", "created_at": "2019-10-21T17:31:43Z" }, { "body": "I stumbled upon the same issue: The html entity &shy; results in\"&shy;\" as text when used in templates instead of the soft hyphen. The same for \"&#173;\" and others.\r\nVue 2.6.10", "created_at": "2020-02-24T19:11:54Z" }, { "body": "for folks that are looking for a workaround for this:\r\nWhen passing a string with non-breaking spaces as a prop, I just replaced the spaces with characters unlikely to be part of the string, in my case 'zzz'\r\n`var newName = program_name.replace(/\\s/g,'zzz');`\r\n\r\nThen, in the actual component where I need to display or use this prop, I have a computed function that undoes the above action and replaces the placeholder chars with spaces again\r\n\r\n```\r\ncorrectedProgramName(){ //this reverses the space-replacing we had to do in reporting-dashboard.js\r\n let correctedProgramName = this.program_name.replace(/zzz/g, ' ')\r\n return correctedProgramName\r\n }\r\n```", "created_at": "2020-07-28T16:33:07Z" }, { "body": "@posva Sorry to trouble you , I found a solution for this issue. \r\n\r\nAfter my fix, the behavior of Vue will be the same as HTML, just like the screenshot below,\r\n\r\nCan I pick up this issue and make a Pull Request?\r\n\r\n![image](https://user-images.githubusercontent.com/14243906/89121411-58307900-d4f1-11ea-9569-4db2ad67ffd5.png)\r\n\r\n \r\n![image](https://user-images.githubusercontent.com/14243906/89121517-2b309600-d4f2-11ea-8214-4f879958b528.png)\r\n", "created_at": "2020-08-02T11:26:19Z" }, { "body": "Sure @JuniorTour \r\n\r\nThis issue might be related to https://github.com/vuejs/vue/issues/10485 and https://github.com/vuejs/vue/issues/11059\r\nSo it's worth taking a look at the existing PRs as well", "created_at": "2020-08-03T07:33:50Z" }, { "body": "@posva \r\nCan you or someone else please revisit this issue? @JuniorTour 's PR #11599 will fix the problem mentioned where a non-breaking space character entered in props is output as &nbsp; in the template. This is causing problems for us in a project where we are passing translated strings to props and need to preserve nbsp, and it would be great to have this in place rather than having to do a string replace on everything coming back from translation. Thanks!", "created_at": "2021-07-01T11:22:36Z" } ], "number": 8895, "title": "Non-breaking space acts different than other characters - outputs \"&nbsp;\" in template when passed via props" }
{ "body": "### Profit:\r\n\r\n1. Close #8895, #8805, #10121 and many issues about decoding HTML entities.\r\n2. Go a step further to decode all the HTML entities in the value of prop and attribute. Ensuring consistent behavior between Vue.js and browser. Solving the HTML entities decode issue once and for all.\r\n3. Optimizing the particular logic of fixing #3663 and #6828, simplify the code, improve the robustness.\r\n\r\n### DEMO:\r\n| | Before Fix | After Fix |\r\n|---|---|---|\r\n| Reproduction Link | https://jsfiddle.net/juniortour/op1L4z3s/ | https://jsfiddle.net/juniortour/qg9pwz7v/\r\n| Screenshot | ![A06J9%`ECX6R${Q{0GI7IU8](https://user-images.githubusercontent.com/14243906/130987710-c4fed9ea-da96-420e-bec8-4bce5a3eff0f.png) | ![(V~30B(4I%%$K%F4Z4BEW)4](https://user-images.githubusercontent.com/14243906/130987835-e8d24c01-96c6-4a50-ab4b-494cae20525d.png)\r\n\r\n- Fix #8895\r\n- Fix #8805\r\n- Fix #10121\r\n\r\n<hr/>\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 11599, "review_comments": [], "title": "fix(compiler): support decode all HTML entities in the value of prop and attribute (fix #8895)" }
{ "commits": [ { "message": "fix(compiler): decode all HTML entities in the value of prop and attribute (fix #8895)" } ], "files": [ { "diff": "@@ -0,0 +1,6 @@\n+/* @flow */\n+\n+import he from 'he'\n+import { cached } from 'shared/util'\n+\n+export default cached(he.decode)", "filename": "src/compiler/parser/html-decoder.js", "status": "added" }, { "diff": "@@ -12,6 +12,7 @@\n import { makeMap, no } from 'shared/util'\n import { isNonPhrasingTag } from 'web/compiler/util'\n import { unicodeRegExp } from 'core/util/lang'\n+import decodeHTML from './html-decoder'\n \n // Regular Expressions for parsing tags and attributes\n const attribute = /^\\s*([^\\s\"'<>\\/=]+)(?:\\s*(=)\\s*(?:\"([^\"]*)\"+|'([^']*)'+|([^\\s\"'=<>`]+)))?/\n@@ -30,27 +31,10 @@ const conditionalComment = /^<!\\[/\n export const isPlainTextElement = makeMap('script,style,textarea', true)\n const reCache = {}\n \n-const decodingMap = {\n- '&lt;': '<',\n- '&gt;': '>',\n- '&quot;': '\"',\n- '&amp;': '&',\n- '&#10;': '\\n',\n- '&#9;': '\\t',\n- '&#39;': \"'\"\n-}\n-const encodedAttr = /&(?:lt|gt|quot|amp|#39);/g\n-const encodedAttrWithNewLines = /&(?:lt|gt|quot|amp|#39|#10|#9);/g\n-\n // #5992\n const isIgnoreNewlineTag = makeMap('pre,textarea', true)\n const shouldIgnoreFirstNewline = (tag, html) => tag && isIgnoreNewlineTag(tag) && html[0] === '\\n'\n \n-function decodeAttr (value, shouldDecodeNewlines) {\n- const re = shouldDecodeNewlines ? encodedAttrWithNewLines : encodedAttr\n- return value.replace(re, match => decodingMap[match])\n-}\n-\n export function parseHTML (html, options) {\n const stack = []\n const expectHTML = options.expectHTML\n@@ -229,12 +213,9 @@ export function parseHTML (html, options) {\n for (let i = 0; i < l; i++) {\n const args = match.attrs[i]\n const value = args[3] || args[4] || args[5] || ''\n- const shouldDecodeNewlines = tagName === 'a' && args[1] === 'href'\n- ? 
options.shouldDecodeNewlinesForHref\n- : options.shouldDecodeNewlines\n attrs[i] = {\n name: args[1],\n- value: decodeAttr(value, shouldDecodeNewlines)\n+ value: decodeHTML(value)\n }\n if (process.env.NODE_ENV !== 'production' && options.outputSourceRange) {\n attrs[i].start = args.start + args[0].match(/^\\s*/).length", "filename": "src/compiler/parser/html-parser.js", "status": "modified" }, { "diff": "@@ -1,12 +1,12 @@\n /* @flow */\n \n-import he from 'he'\n import { parseHTML } from './html-parser'\n import { parseText } from './text-parser'\n import { parseFilters } from './filter-parser'\n import { genAssignmentCode } from '../directives/model'\n-import { extend, cached, no, camelize, hyphenate } from 'shared/util'\n+import { extend, no, camelize, hyphenate } from 'shared/util'\n import { isIE, isEdge, isServerRendering } from 'core/util/env'\n+import decodeHTML from './html-decoder'\n \n import {\n addProp,\n@@ -42,8 +42,6 @@ const whitespaceRE = /[ \\f\\t\\r\\n]+/g\n \n const invalidAttributeRE = /[\\s\"'<>\\/=]/\n \n-const decodeHTMLCached = cached(he.decode)\n-\n export const emptySlotScopeToken = `_empty_`\n \n // configurable state\n@@ -210,8 +208,6 @@ export function parse (\n expectHTML: options.expectHTML,\n isUnaryTag: options.isUnaryTag,\n canBeLeftOpenTag: options.canBeLeftOpenTag,\n- shouldDecodeNewlines: options.shouldDecodeNewlines,\n- shouldDecodeNewlinesForHref: options.shouldDecodeNewlinesForHref,\n shouldKeepComment: options.comments,\n outputSourceRange: options.outputSourceRange,\n start (tag, attrs, unary, start, end) {\n@@ -339,7 +335,7 @@ export function parse (\n }\n const children = currentParent.children\n if (inPre || text.trim()) {\n- text = isTextTag(currentParent) ? text : decodeHTMLCached(text)\n+ text = isTextTag(currentParent) ? 
text : decodeHTML(text)\n } else if (!children.length) {\n // remove the whitespace-only node right after an opening tag\n text = ''", "filename": "src/compiler/parser/index.js", "status": "modified" }, { "diff": "@@ -7,7 +7,6 @@ import { mark, measure } from 'core/util/perf'\n import Vue from './runtime/index'\n import { query } from './util/index'\n import { compileToFunctions } from './compiler/index'\n-import { shouldDecodeNewlines, shouldDecodeNewlinesForHref } from './util/compat'\n \n const idToTemplate = cached(id => {\n const el = query(id)\n@@ -64,8 +63,6 @@ Vue.prototype.$mount = function (\n \n const { render, staticRenderFns } = compileToFunctions(template, {\n outputSourceRange: process.env.NODE_ENV !== 'production',\n- shouldDecodeNewlines,\n- shouldDecodeNewlinesForHref,\n delimiters: options.delimiters,\n comments: options.comments\n }, this)", "filename": "src/platforms/web/entry-runtime-with-compiler.js", "status": "modified" }, { "diff": "@@ -2,6 +2,7 @@ import { parse } from 'compiler/parser/index'\n import { extend } from 'shared/util'\n import { baseOptions } from 'web/compiler/options'\n import { isIE, isEdge } from 'core/util/env'\n+import Vue from 'vue'\n \n describe('parser', () => {\n it('simple element', () => {\n@@ -925,4 +926,38 @@ describe('parser', () => {\n expect(`<template v-slot> can only appear at the root level inside the receiving the component`)\n .not.toHaveBeenWarned()\n })\n+\n+ it(`HTML entities in the value of attribute should be decoded`, () => {\n+ const options = extend({}, baseOptions)\n+ const ast = parse('<input value=\"white&nbsp;space,single-&#39;-quote,double-&quot;-quote,an-&amp;-ampersand,less-&lt;-than,great-&gt;-than,line-&#10;-break,tab-&#9;-space\" />', options)\n+ expect(ast.attrsList[0].value).toBe('white space,single-' + \"'\" + '-quote,double-' + '\"' + '-quote,an-&-ampersand,less-<-than,great->-than,line-\\n-break,tab-\\t-space')\n+ })\n+\n+ it(`HTML entities in template should be decoded`, () => {\n+ const vm = new Vue({\n+ template: '<test></test>',\n+ components: {\n+ test: {\n+ template: '<input value=\"&#102;&#111;&#111;\">'\n+ }\n+ }\n+ }).$mount()\n+ expect(vm.$el.value).toBe('foo')\n+ })\n+\n+ it(`HTML entities in the value of props should be decoded`, () => {\n+ const vm = new Vue({\n+ template: '<test name=\"-&nbsp;-\"></test>',\n+ components: {\n+ test: {\n+ template: '<div>{{ name }}</div>',\n+ props: {\n+ name: String,\n+ },\n+ }\n+ }\n+ }).$mount()\n+ expect(vm.$el.innerHTML).toBe('-&nbsp;-')\n+ expect(vm.$el.innerText).toBe('- -')\n+ })\n })", "filename": "test/unit/modules/compiler/parser.spec.js", "status": "modified" } ] }
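Until the broader entity decoding lands, the userland workaround quoted in the issue thread is to build the string in JavaScript with the literal non-breaking space character instead of relying on `&nbsp;` inside an in-DOM template attribute. A sketch with illustrative component and prop names, assuming Vue 2 is loaded:

```js
// \u00a0 is the non-breaking space; the snippet from the thread replaced only
// the first space, so a global regex is used here instead.
function noBreakingSpaces (str) {
  return str.replace(/ /g, '\u00a0')
}

const vm = new Vue({
  components: {
    Greeting: { props: { name: String }, template: '<div>{{ name }}</div>' }
  },
  data: { programName: noBreakingSpaces('My Program Name') },
  template: '<greeting :name="programName"></greeting>'
}).$mount()

console.log(vm.$el.textContent) // "My Program Name" joined with non-breaking spaces
```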
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n\r\nhttps://jsfiddle.net/posva/vczwh82g/2/\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n\r\nIn Safari click different links to load iframes\r\n\r\n### What is expected?\r\n\r\nall iframes should load\r\n\r\n### What is actually happening?\r\n\r\nthe one rendered inside vue doesn't load\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "This is weird. I would guess Vue is adding some attribute or property that is making this fail in Safari but can't tell which one", "created_at": "2020-08-06T10:27:30Z" }, { "body": "Hey @posva may i give this a shot? i'd like to contribute", "created_at": "2020-08-07T09:28:02Z" }, { "body": "for sure @christoph-schaeffer !", "created_at": "2020-08-07T10:12:33Z" }, { "body": "I've created a PR with a detailed explanation on what caused this.", "created_at": "2020-08-08T11:05:39Z" } ], "number": 11569, "title": "Safari doesn't see frame inside vue app" }
{ "body": "fix: When changing the attribute 'name' of an iframe element, the iframes window must be updated aswell. Otherwise anchor tags with a 'target' attribute won't hit the right iframe.\r\n\r\nClose #11569\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nThis Issue actually wasn’t only present in safari. When iframes are created a window will be attached to the iframe which actually is the target. When updating the name attribute of an existing iframe the window inside the iframe doesn’t get updated.\r\n\r\nIn native javascript you could fix this with:\r\ndocument.querySelector('iframe').documentWindow.name = 'theNewName';\r\n\r\nThe reproduction fiddle in the Issue #11569 only is bugged in safari, but if you try to change the attribute dynamically it doesn’t work in chrome or firefox either.\r\n\r\nCheck this fiddle for the cross-platform issue : https://jsfiddle.net/ug2jhmL6/1/\r\n", "number": 11578, "review_comments": [ { "body": "Is there any reason we need a parameter instead of directly using `el` inside of the function?", "created_at": "2021-03-24T11:07:49Z" }, { "body": "I could be wrong, but AFAICS this could trigger a browser security error, if the `iFrame` is cross-origin, e.g. like this:\r\n\r\n```\r\nUncaught DOMException: Blocked a frame with origin \"http://localhost:8080\" from accessing a cross-origin frame.\r\n```", "created_at": "2021-07-13T15:43:24Z" } ], "title": "fix: iframe-window-name-update" }
{ "commits": [ { "message": "fix: When changing the attribute 'name' of an iframe element, the iframes window must be updated aswell. Otherwise anchor tags with a 'target' attribute won't hit the right iframe." }, { "message": "fix: Changes after review: When changing the attribute 'name' of an iframe element, the iframes window must be updated as well. Otherwise anchor tags with a 'target' attribute won't hit the right iframe." }, { "message": "chore: disable line flow" } ], "files": [ { "diff": "@@ -109,7 +109,13 @@ function baseSetAttr (el, key, value) {\n // $flow-disable-line\n el.__ieph = true /* IE placeholder patched */\n }\n- el.setAttribute(key, value)\n+ // When changing the attribute 'name' of an iframe element, the iframes window must be updated as well. Otherwise\n+ // anchor tags with a 'target' attribute won't hit the right iframe.\n+ if (el.tagName === 'IFRAME' && key === 'name' && el.contentWindow) {\n+ // $flow-disable-line\n+ el.contentWindow.name = value;\n+ }\n+ el.setAttribute(key, value);\n }\n }\n ", "filename": "src/platforms/web/runtime/modules/attrs.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/0jh1psog/](https://jsfiddle.net/0jh1psog/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nOpen the console in the provided JSFiddle\r\nor\r\nrun `Vue.config.getTagNamespace('foreignObject')`\r\n\r\n### What is expected?\r\n`Vue.config.getTagNamespace('foreignObject')` should return `'svg'`\r\n\r\n### What is actually happening?\r\nIt returns `undefined`.\r\n\r\nThis causes a warning `Unknown custom element: <foreignObject>` when testing such components using `vue-test-utils`\r\n\r\n---\r\nThis is caused by https://github.com/vuejs/vue/blob/52719ccab8fccffbdf497b96d3731dc86f04c1ce/src/platforms/web/util/element.js#L29\r\nIt should be `foreignobject` with lowercase `o` here, because of the `makeMap` implementation.\r\n\r\nTo avoid such bugs in the future, even better fix:\r\nhttps://github.com/vuejs/vue/blob/6fe07ebf5ab3fea1860c59fe7cdd2ec1b760f9b0/src/shared/util.js#L113\r\nshould be `map[expectsLowerCase ? list[i].toLowerCase() : list[i]] = true`\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Workaround (can go for example in one of your [Jest setup files](https://jestjs.io/docs/en/configuration#setupfiles-array)):\r\n```\r\nimport Vue from 'vue';\r\nconst { getTagNamespace } = Vue.config;\r\nVue.config.getTagNamespace = (tag) => {\r\n if (tag.toLowerCase() === 'foreignobject') {\r\n return 'svg';\r\n }\r\n return getTagNamespace(tag);\r\n};\r\n```", "created_at": "2020-08-07T14:30:05Z" } ], "number": 11575, "title": "Unknown custom element: <foreignObject>" }
{ "body": "Fix #11575\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 11576, "review_comments": [], "title": "fix: give correct namespace in foreignObject" }
{ "commits": [ { "message": "fix: give correct namespace to foreignObject\n\nFix #11575" } ], "files": [ { "diff": "@@ -26,7 +26,7 @@ export const isHTMLTag = makeMap(\n // contain child elements.\n export const isSVG = makeMap(\n 'svg,animate,circle,clippath,cursor,defs,desc,ellipse,filter,font-face,' +\n- 'foreignObject,g,glyph,image,line,marker,mask,missing-glyph,path,pattern,' +\n+ 'foreignobject,g,glyph,image,line,marker,mask,missing-glyph,path,pattern,' +\n 'polygon,polyline,rect,switch,symbol,text,textpath,tspan,use,view',\n true\n )", "filename": "src/platforms/web/util/element.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://codepen.io/kubotafumiya/pen/qBOwEOV](https://codepen.io/kubotafumiya/pen/qBOwEOV)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n1. Set the el option to an element that contains a textarea (or pre) with a newline at the beginning, and create a Vue instance.\r\n\r\n\r\n```html\r\n<div id=\"app\">\r\n<textarea>\r\n\r\n\r\nThree line breaks are ignored.\r\n</textarea>\r\n</div>\r\n```\r\n\r\n```javascript\r\nnew Vue({\r\n el: '#app',\r\n});\r\n```\r\n\r\n### What is expected?\r\nThe line breaks are removed.\r\n\r\n\r\n### What is actually happening?\r\nLine breaks are not deleted.\r\n\r\n---\r\nhttps://html.spec.whatwg.org/multipage/syntax.html#element-restrictions\r\n\r\nI think it's okay for a single line break to be ignored according to this specification.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I found the reason. Can i take this issue.", "created_at": "2020-06-15T09:30:21Z" }, { "body": "@wonhyoung05 Yes, I couldn't find an existing issue or PR for this problem", "created_at": "2020-06-15T09:53:51Z" } ], "number": 11446, "title": "Line breaks in textarea are excessively removed." }
{ "body": "fix: #11446 \r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [X] Bugfix \r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [X] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [X] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [X] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [X] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 11463, "review_comments": [], "title": "fix(compiler): Line breaks in textarea are not removed. (fix: #11446)" }
{ "commits": [ { "message": "fix(compiler/parser/html-parser.js): Line breaks in textareas are not removed.(fix: #11446)" } ], "files": [ { "diff": "@@ -44,7 +44,7 @@ const encodedAttrWithNewLines = /&(?:lt|gt|quot|amp|#39|#10|#9);/g\n \n // #5992\n const isIgnoreNewlineTag = makeMap('pre,textarea', true)\n-const shouldIgnoreFirstNewline = (tag, html) => tag && isIgnoreNewlineTag(tag) && html[0] === '\\n'\n+const shouldIgnoreFirstNewline = (tag, html) => tag && isIgnoreNewlineTag(tag) && html[0] === '\\n' && html[1] && html[1] !== '\\n'\n \n function decodeAttr (value, shouldDecodeNewlines) {\n const re = shouldDecodeNewlines ? encodedAttrWithNewLines : encodedAttr", "filename": "src/compiler/parser/html-parser.js", "status": "modified" }, { "diff": "@@ -698,7 +698,7 @@ describe('parser', () => {\n expect(text.text).toBe('\\ndef')\n const pre2 = ast.children[2]\n expect(pre2.children[0].type).toBe(3)\n- expect(pre2.children[0].text).toBe('\\nabc')\n+ expect(pre2.children[0].text).toBe('\\n\\nabc')\n })\n \n it('keep first newline after unary tag in <pre>', () => {\n@@ -870,7 +870,7 @@ describe('parser', () => {\n expect(text.text).toBe(' def')\n const pre2 = ast.children[2]\n expect(pre2.children[0].type).toBe(3)\n- expect(pre2.children[0].text).toBe('\\nabc')\n+ expect(pre2.children[0].text).toBe('\\n\\nabc')\n })\n \n it(`keep first newline after unary tag in <pre> with whitespace: 'condense'`, () => {", "filename": "test/unit/modules/compiler/parser.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/AleksandrasNovikovas/w042x1c8/](https://jsfiddle.net/AleksandrasNovikovas/w042x1c8/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nRun provided fiddle. There are three svg boxes with foreignObject:\r\n1. contains simple html tags\r\n2. contains simple vue component\r\n3. contains complex (with slot) vue component\r\n\r\n\r\n### What is expected?\r\nAll three boxes should show link and input elements\r\n\r\n### What is actually happening?\r\nThird box does not show link and input elements.\r\n\r\n---\r\nWhile inspecting DOM (in chrome or in firefox) you will find that elements of second box and third box are identical.\r\nProblem is their types: (in chome dev console select element and tab properties)\r\nselect input element from second box and you will find following list: Object->EventTarget->Node->Element->HTMLElement->HTMLInputElement->input;\r\nselect input element from third box and you will find following list: Object->EventTarget->Node->Element->SVGElement->input;\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "#11349 This is not a problem anymore because I have fixed it and submitted PR😁.", "created_at": "2020-04-26T14:15:38Z" }, { "body": "Can we get this reviewed and fixed? I don't get why this obvious issue is not treated.", "created_at": "2021-02-19T07:16:50Z" } ], "number": 11315, "title": "Components slots are not rendered inside svg foreignObject" }
{ "body": "Fix #11315\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [ ] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 11349, "review_comments": [], "title": "fix(svg): apply every namespace in slot when call applyNS" }
{ "commits": [ { "message": "build: use __DEV__ flag" }, { "message": "refactor: isArray" }, { "message": "workflow: allow wip commits" }, { "message": "wip(vca): partial ref and watch implementation" }, { "message": "wip: fix entry with compiler" }, { "message": "wip: ref tests passing" }, { "message": "wip: reactive tests passing" }, { "message": "wip: computed tests passing" }, { "message": "chore: remove duplicated types" }, { "message": "refactor: simplify implementations\n\nsince we output es5, classes are more verbose then necessary" }, { "message": "ci: fix ci" }, { "message": "ci: __DEV__ flag for e2e tests" }, { "message": "wip: readonly" }, { "message": "wip: shallowReactive/shallowReadonly" }, { "message": "types: types for setup() + format types" }, { "message": "wip: rename dir to v3" }, { "message": "chore: format typescript dir" }, { "message": "wip: lifecycle api" }, { "message": "chore: use isFunction" }, { "message": "wip: setup() tests" }, { "message": "wip: apiWatch tests" }, { "message": "chore: fix ssr tests" }, { "message": "chore: fix unit tests" }, { "message": "wip: effectScope" }, { "message": "wip: onTrack debugger option" }, { "message": "wip: onTrack/onTrigger debugger options" }, { "message": "wip: renderTriggered/renderTracked" }, { "message": "wip: provide/inject" }, { "message": "chore: rename template ref module" }, { "message": "wip: setup() template refs support" } ], "files": [ { "diff": "@@ -0,0 +1,4 @@\n+# chore: move to typescript\n+af9fc2bcff31d5baa413039818a9b3e011deccaf\n+# workflow: remove eslint, apply prettier\n+72aed6a149b94b5b929fb47370a7a6d4cb7491c5", "filename": ".git-blame-ignore-revs", "status": "added" }, { "diff": "@@ -10,4 +10,4 @@ Project maintainers have the right and responsibility to remove, edit, or reject\n \n Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers.\n \n-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.0.0, available at [http://contributor-covenant.org/version/1/0/0/](http://contributor-covenant.org/version/1/0/0/)\n+This Code of Conduct is adapted from the [Contributor Covenant](https://contributor-covenant.org), version 1.0.0, available at [http://contributor-covenant.org/version/1/0/0/](http://contributor-covenant.org/version/1/0/0/)", "filename": ".github/CODE_OF_CONDUCT.md", "status": "modified" }, { "diff": "@@ -25,6 +25,7 @@ Hi! I'm really excited that you are interested in contributing to Vue.js. Before\n - Make sure `npm test` passes. (see [development setup](#development-setup))\n \n - If adding a new feature:\n+\n - Add accompanying test case.\n - Provide a convincing reason to add this feature. Ideally, you should open a suggestion issue first and have it approved before working on it.\n \n@@ -35,12 +36,12 @@ Hi! I'm really excited that you are interested in contributing to Vue.js. 
Before\n \n ## Development Setup\n \n-You will need [Node.js](http://nodejs.org) **version 8+**, [Java Runtime Environment](http://www.oracle.com/technetwork/java/javase/downloads/index.html) (for running Selenium server during e2e tests) and [yarn](https://yarnpkg.com/en/docs/install).\n+You will need [Node.js](https://nodejs.org) **version 12+** and [pnpm](https://pnpm.io/).\n \n After cloning the repo, run:\n \n-``` bash\n-$ yarn # install the dependencies of the project\n+```bash\n+$ pnpm i # install the dependencies of the project\n ```\n \n ### Committing Changes\n@@ -49,17 +50,20 @@ Commit messages should follow the [commit message convention](./COMMIT_CONVENTIO\n \n ### Commonly used NPM scripts\n \n-``` bash\n+```bash\n # watch and auto re-build dist/vue.js\n $ npm run dev\n \n-# watch and auto re-run unit tests in Chrome\n-$ npm run dev:test\n+# run unit tests\n+$ npm run test:unit\n+\n+# run specific tests in watch mode\n+$ npx vitest {test_file_name_pattern_to_match}\n \n # build all dist files, including npm packages\n $ npm run build\n \n-# run the full test suite, including linting/type checking\n+# run the full test suite, including unit/e2e/type checking\n $ npm test\n ```\n \n@@ -79,9 +83,15 @@ The default test script will do the following: lint with ESLint -> type check wi\n \n See [dist/README.md](https://github.com/vuejs/vue/blob/dev/dist/README.md) for more details on dist files.\n \n-- **`flow`**: contains type declarations for [Flow](https://flowtype.org/). These declarations are loaded **globally** and you will see them used in type annotations in normal source code.\n+- **`types`**: contains public types published to npm (note the types shipped here could be different from `src/types`). These were hand-authored before we moved the codebase from Flow to TypeScript. To ensure backwards compatibility, we keep using these manually authored types.\n+\n+ Types for new features added in 2.7 (Composition API) are auto-generated from source code as `types/v3-generated.d.ts` and re-exported from `types/index.d.ts`.\n+\n+- **`packages`**:\n \n-- **`packages`**: contains `vue-server-renderer` and `vue-template-compiler`, which are distributed as separate NPM packages. They are automatically generated from the source code and always have the same version with the main `vue` package.\n+ - `vue-server-renderer` and `vue-template-compiler` are distributed as separate NPM packages. They are automatically generated from the source code and always have the same version with the main `vue` package.\n+\n+ - `compiler-sfc` is an internal package that is distributed as part of the main `vue` package. It's aliased and can be imported as `vue/compiler-sfc` similar to Vue 3.\n \n - **`test`**: contains all tests. The unit tests are written with [Jasmine](http://jasmine.github.io/2.3/introduction.html) and run with [Karma](http://karma-runner.github.io/0.13/index.html). The e2e tests are written for and run with [Nightwatch.js](http://nightwatchjs.org/).\n \n@@ -111,16 +121,13 @@ The default test script will do the following: lint with ESLint -> type check wi\n \n Entry files for dist builds are located in their respective platform directory.\n \n- Each platform module contains three parts: `compiler`, `runtime` and `server`, corresponding to the three directories above. Each part contains platform-specific modules/utilities which are imported and injected to the core counterparts in platform-specific entry files. 
For example, the code implementing the logic behind `v-bind:class` is in `platforms/web/runtime/modules/class.js` - which is imported in `entries/web-runtime.js` and used to create the browser-specific vdom patching function.\n+ Each platform module contains three parts: `compiler`, `runtime` and `server`, corresponding to the three directories above. Each part contains platform-specific modules/utilities which are imported and injected to the core counterparts in platform-specific entry files. For example, the code implementing the logic behind `v-bind:class` is in `platforms/web/runtime/modules/class.js` - which is imported in `platforms/web/entry-runtime.ts` and used to create the browser-specific vdom patching function.\n \n - **`sfc`**: contains single-file component (`*.vue` files) parsing logic. This is used in the `vue-template-compiler` package.\n \n - **`shared`**: contains utilities shared across the entire codebase.\n \n- - **`types`**: contains TypeScript type definitions\n-\n- - **`test`**: contains type definitions tests\n-\n+ - **`types`**: contains type declarations added when we ported the codebase from Flow to TypeScript. These types should be considered internal - they care less about type inference for end-user scenarios and prioritize working with internal source code.\n \n ## Financial Contribution\n ", "filename": ".github/CONTRIBUTING.md", "status": "modified" }, { "diff": "@@ -24,7 +24,7 @@ If yes, please describe the impact and migration path for existing applications:\n \n **The PR fulfills these requirements:**\n \n-- [ ] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\n+- [ ] It's submitted to the `main` branch for v2.x (or to a previous version branch)\n - [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. 
`fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\n - [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\n - [ ] New/updated tests are included", "filename": ".github/PULL_REQUEST_TEMPLATE.md", "status": "modified" }, { "diff": "@@ -0,0 +1,93 @@\n+name: 'ci'\n+on:\n+ push:\n+ branches:\n+ - main\n+ pull_request:\n+ branches:\n+ - main\n+jobs:\n+ unit-test:\n+ runs-on: ubuntu-latest\n+ steps:\n+ - uses: actions/checkout@v2\n+\n+ - name: Install pnpm\n+ uses: pnpm/action-setup@v2\n+\n+ - name: Set node version to 16\n+ uses: actions/setup-node@v2\n+ with:\n+ node-version: 16\n+ cache: 'pnpm'\n+\n+ - run: pnpm install\n+\n+ - name: Run unit tests\n+ run: pnpm run test:unit\n+\n+ ssr-sfc-test:\n+ runs-on: ubuntu-latest\n+ steps:\n+ - uses: actions/checkout@v2\n+\n+ - name: Install pnpm\n+ uses: pnpm/action-setup@v2\n+\n+ - name: Set node version to 16\n+ uses: actions/setup-node@v2\n+ with:\n+ node-version: 16\n+ cache: 'pnpm'\n+\n+ - run: pnpm install\n+\n+ - name: Run SSR tests\n+ run: pnpm run test:ssr\n+\n+ - name: Run compiler-sfc tests\n+ run: pnpm run test:sfc\n+\n+ e2e-test:\n+ runs-on: ubuntu-latest\n+ steps:\n+ - uses: actions/checkout@v2\n+\n+ - name: Install pnpm\n+ uses: pnpm/action-setup@v2\n+\n+ - name: Set node version to 16\n+ uses: actions/setup-node@v2\n+ with:\n+ node-version: 16\n+ cache: 'pnpm'\n+\n+ - run: pnpm install\n+\n+ - name: Run e2e tests\n+ run: pnpm run test:e2e\n+\n+ - name: Run transition tests\n+ run: pnpm run test:transition\n+\n+ type-test:\n+ runs-on: ubuntu-latest\n+ steps:\n+ - uses: actions/checkout@v2\n+\n+ - name: Install pnpm\n+ uses: pnpm/action-setup@v2\n+\n+ - name: Set node version to 16\n+ uses: actions/setup-node@v2\n+ with:\n+ node-version: 16\n+ cache: 'pnpm'\n+\n+ - run: pnpm install\n+\n+ - name: Run srouce type check\n+ run: pnpm run ts-check\n+\n+ - name: Run type declaration tests\n+ run: pnpm run test:types", "filename": ".github/workflows/ci.yml", "status": "added" }, { "diff": "@@ -0,0 +1,23 @@\n+on:\n+ push:\n+ tags:\n+ - 'v*' # Push events to matching v*, i.e. 
v1.0, v20.15.10\n+\n+name: Create Release\n+\n+jobs:\n+ build:\n+ name: Create Release\n+ runs-on: ubuntu-latest\n+ steps:\n+ - name: Checkout code\n+ uses: actions/checkout@master\n+ - name: Create Release for Tag\n+ id: release_tag\n+ uses: yyx990803/release-tag@master\n+ env:\n+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n+ with:\n+ tag_name: ${{ github.ref }}\n+ body: |\n+ Please refer to [CHANGELOG.md](https://github.com/vuejs/vue/blob/main/CHANGELOG.md) for details.", "filename": ".github/workflows/release-tag.yml", "status": "added" }, { "diff": "@@ -3,17 +3,15 @@ node_modules\n *.log\n explorations\n TODOs.md\n-dist/*.gz\n-dist/*.map\n-dist/vue.common.min.js\n-test/e2e/reports\n-test/e2e/screenshots\n-coverage\n RELEASE_NOTE*.md\n-dist/*.js\n-packages/vue-server-renderer/basic.js\n-packages/vue-server-renderer/build.js\n-packages/vue-server-renderer/server-plugin.js\n-packages/vue-server-renderer/client-plugin.js\n-packages/vue-template-compiler/build.js\n+packages/server-renderer/basic.js\n+packages/server-renderer/build.dev.js\n+packages/server-renderer/build.prod.js\n+packages/server-renderer/server-plugin.js\n+packages/server-renderer/client-plugin.js\n+packages/template-compiler/build.js\n+packages/template-compiler/browser.js\n .vscode\n+dist\n+temp\n+types/v3-generated.d.ts", "filename": ".gitignore", "status": "modified" }, { "diff": "@@ -0,0 +1,5 @@\n+semi: false\n+singleQuote: true\n+printWidth: 80\n+trailingComma: 'none'\n+arrowParens: 'avoid'", "filename": ".prettierrc", "status": "added" }, { "diff": "", "filename": "CHANGELOG.md", "status": "added" }, { "diff": "@@ -17,9 +17,19 @@ You are looking at the repository for Vue 2. The repo for Vue 3 is [vuejs/core](\n \n Vue.js is an MIT-licensed open source project with its ongoing development made possible entirely by the support of these awesome [backers](https://github.com/vuejs/core/blob/main/BACKERS.md). 
If you'd like to join them, please consider [ sponsor Vue's development](https://vuejs.org/sponsor/).\n \n+<p align=\"center\">\n+ <h3 align=\"center\">Special Sponsor</h3>\n+</p>\n+\n+<p align=\"center\">\n+ <a target=\"_blank\" href=\"https://github.com/appwrite/appwrite\">\n+ <img alt=\"special sponsor appwrite\" src=\"https://sponsors.vuejs.org/images/appwrite.svg\" width=\"300\">\n+ </a>\n+</p>\n+\n <p align=\"center\">\n <a target=\"_blank\" href=\"https://vuejs.org/sponsor/\">\n- <img alt=\"sponsors\" src=\"https://sponsors.vuejs.org/sponsors.svg\">\n+ <img alt=\"sponsors\" src=\"https://sponsors.vuejs.org/sponsors.svg?v2\">\n </a>\n </p>\n \n@@ -73,7 +83,7 @@ Vue.js supports all browsers that are [ES5-compliant](https://kangax.github.io/c\n \n ## Documentation\n \n-To check out [live examples](https://vuejs.org/v2/examples/) and docs, visit [vuejs.org](https://vuejs.org).\n+To check out [live examples](https://v2.vuejs.org/v2/examples/) and docs, visit [vuejs.org](https://v2.vuejs.org).\n \n ## Questions\n ", "filename": "README.md", "status": "modified" }, { "diff": "@@ -0,0 +1,64 @@\n+{\n+ \"$schema\": \"https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json\",\n+\n+ \"projectFolder\": \".\",\n+\n+ \"compiler\": {\n+ \"tsconfigFilePath\": \"api-extractor.tsconfig.json\"\n+ },\n+\n+ \"mainEntryPointFilePath\": \"./temp/src/v3/index.d.ts\",\n+\n+ \"dtsRollup\": {\n+ \"enabled\": true,\n+ \"untrimmedFilePath\": \"\",\n+ \"publicTrimmedFilePath\": \"./types/v3-generated.d.ts\"\n+ },\n+\n+ \"apiReport\": {\n+ \"enabled\": false\n+ },\n+\n+ \"docModel\": {\n+ \"enabled\": false\n+ },\n+\n+ \"tsdocMetadata\": {\n+ \"enabled\": false\n+ },\n+\n+ \"messages\": {\n+ \"compilerMessageReporting\": {\n+ \"default\": {\n+ \"logLevel\": \"warning\"\n+ }\n+ },\n+\n+ \"extractorMessageReporting\": {\n+ \"default\": {\n+ \"logLevel\": \"warning\",\n+ \"addToApiReportFile\": true\n+ },\n+\n+ \"ae-missing-release-tag\": {\n+ \"logLevel\": \"none\"\n+ },\n+ \"ae-internal-missing-underscore\": {\n+ \"logLevel\": \"none\"\n+ },\n+ \"ae-forgotten-export\": {\n+ \"logLevel\": \"none\"\n+ }\n+ },\n+\n+ \"tsdocMessageReporting\": {\n+ \"default\": {\n+ \"logLevel\": \"warning\"\n+ },\n+\n+ \"tsdoc-undefined-tag\": {\n+ \"logLevel\": \"none\"\n+ }\n+ }\n+ }\n+}", "filename": "api-extractor.json", "status": "added" }, { "diff": "@@ -0,0 +1,7 @@\n+{\n+ \"extends\": \"./tsconfig.json\",\n+ \"compilerOptions\": {\n+ \"baseUrl\": \"./temp\",\n+ \"types\": []\n+ }\n+}", "filename": "api-extractor.tsconfig.json", "status": "added" }, { "diff": "@@ -5,7 +5,7 @@\n process.env.NODE_ENV = 'production'\n \n const Vue = require('../../dist/vue.runtime.common.js')\n-const createRenderer = require('../../packages/vue-server-renderer').createRenderer\n+const createRenderer = require('../../packages/server-renderer').createRenderer\n const renderToStream = createRenderer().renderToStream\n const gridComponent = require('./common.js')\n ", "filename": "benchmarks/ssr/renderToStream.js", "status": "modified" }, { "diff": "@@ -3,7 +3,7 @@\n process.env.NODE_ENV = 'production'\n \n const Vue = require('../../dist/vue.runtime.common.js')\n-const createRenderer = require('../../packages/vue-server-renderer').createRenderer\n+const createRenderer = require('../../packages/server-renderer').createRenderer\n const renderToString = createRenderer().renderToString\n const gridComponent = require('./common.js')\n ", "filename": "benchmarks/ssr/renderToString.js", "status": "modified" }, { "diff": "@@ 
-0,0 +1 @@\n+export * from '@vue/compiler-sfc'", "filename": "compiler-sfc/index.d.ts", "status": "added" }, { "diff": "@@ -0,0 +1 @@\n+module.exports = require('@vue/compiler-sfc')", "filename": "compiler-sfc/index.js", "status": "added" }, { "diff": "@@ -0,0 +1 @@\n+export * from '@vue/compiler-sfc'", "filename": "compiler-sfc/index.mjs", "status": "added" }, { "diff": "@@ -0,0 +1,5 @@\n+{\n+ \"main\": \"index.js\",\n+ \"module\": \"index.mjs\",\n+ \"types\": \"index.d.ts\"\n+}", "filename": "compiler-sfc/package.json", "status": "added" } ] }
{ "body": "### What problem does this feature solve?\r\nCurrently, SSR directives are not applied to used (sub-)components but only to HTML-tags and elements.\r\n\r\n### Reproduction\r\n\r\n```js\r\n// Step 1: Create a Vue instance\r\nconst Vue = require(\"vue\");\r\n\r\nconst Test = {\r\n template: `<div>This should be red</div>`\r\n}\r\n\r\nconst app = new Vue({\r\n template: `\r\n <div>\r\n <Test v-make-red/>\r\n <div v-make-red>This is red</div>\r\n </div>\r\n `,\r\n components: {\r\n Test\r\n }\r\n});\r\n\r\nconst makeRed = (node, dir) => {\r\n const style = node.data.style || (node.data.style = {});\r\n if (Array.isArray(style)) {\r\n style.push({ backgroundColor: \"red\" });\r\n } else {\r\n style.backgroundColor = \"red\";\r\n }\r\n};\r\n\r\n// Step 2: Create a renderer\r\nconst renderer = require(\"vue-server-renderer\").createRenderer({\r\n directives: {\r\n makeRed\r\n }\r\n});\r\n\r\n// Step 3: Render the Vue instance to HTML\r\nrenderer.renderToString(app, (err, html) => {\r\n if (err) throw err;\r\n console.log(html);\r\n // <div data-server-rendered=\"true\"><div>This should be red</div> <div style=\"background-color:red;\">This is red</div></div>\r\n // But should include red background-color style for the first div as well\r\n});\r\n```\r\n\r\nRelated: https://github.com/nuxt/nuxt.js/issues/6575\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "hey guys. i use vue on a daily basis and i would like to contribute. I see here this is a contribution welcome label.\r\ncan someone help me so i can start to fix this if possible?\r\n\r\n(i already forked installed and can run test and build.)", "created_at": "2020-01-22T09:42:09Z" }, { "body": "Hi, any update on this ? still not working with nuxt 2.14", "created_at": "2020-10-14T12:42:40Z" }, { "body": "> Hi, any update on this ? still not working with nuxt 2.14\r\n\r\n+1 \r\nStill not working, \r\nnuxt@2.14.6\r\nvue@2.6.12\r\nvue-server-renderer@2.6.12 ", "created_at": "2020-11-26T20:01:37Z" }, { "body": "I'm rather confused as to why [this fix](https://github.com/vuejs/vue/pull/11287) hasn't been implemented as it appears to completely resolve this issue...", "created_at": "2021-01-11T16:51:05Z" }, { "body": "Any update on this?", "created_at": "2022-01-06T16:39:34Z" } ], "number": 10733, "title": "vue-server-renderer: directive not applied to imported component" }
{ "body": "fix #10733\r\n\r\nSSR render does not execute directives attached to a component, this is happening because during SSR render we're using the component's root element as a source for rendering, thus there's no directive associated with it.\r\n\r\nThis fixes it by copying directives from the Component declaration to the Component's root element, while rendering the root element it references the Component instance vnode to properly render it's modules applying the directives.\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n", "number": 11287, "review_comments": [], "title": "fix(src/server/render.js): Fixes component directives rendering" }
{ "commits": [ { "message": "fix(src/server/render.js): Fixes component directives rendering\n\nComponent directives are not being render during SSR, this fixes it by passing the directives data\nto its first imediate child.\n\nfix #10733" } ], "files": [ { "diff": "@@ -201,6 +201,11 @@ function renderComponentInner (node, isRoot, context) {\n type: 'Component',\n prevActive\n })\n+ if (isDef(node.data) && isDef(node.data.directives)) {\n+ childNode.data = (childNode.data || {})\n+ childNode.data.directives = node.data.directives\n+ childNode.isComponentRootElement = true\n+ }\n renderNode(childNode, isRoot, context)\n }\n \n@@ -364,7 +369,7 @@ function renderStartingTag (node: VNode, context) {\n if (dirRenderer) {\n // directives mutate the node's data\n // which then gets rendered by modules\n- dirRenderer(node, dirs[i])\n+ dirRenderer(node.isComponentRootElement ? node.parent : node, dirs[i])\n }\n }\n }", "filename": "src/server/render.js", "status": "modified" }, { "diff": "@@ -829,7 +829,7 @@ describe('SSR: renderToString', () => {\n })\n })\n \n- it('custom directives', done => {\n+ it('custom directives on raw element', done => {\n const renderer = createRenderer({\n directives: {\n 'class-prefixer': (node, dir) => {\n@@ -861,6 +861,80 @@ describe('SSR: renderToString', () => {\n })\n })\n \n+ it('custom directives on component', done => {\n+ const Test = {\n+ template: '<span>hello world</span>'\n+ }\n+ const renderer = createRenderer({\n+ directives: {\n+ 'class-prefixer': (node, dir) => {\n+ if (node.data.class) {\n+ node.data.class = `${dir.value}-${node.data.class}`\n+ }\n+ if (node.data.staticClass) {\n+ node.data.staticClass = `${dir.value}-${node.data.staticClass}`\n+ }\n+ }\n+ }\n+ })\n+ renderer.renderToString(new Vue({\n+ template: '<p><Test v-class-prefixer=\"\\'my\\'\" class=\"class1\" :class=\"\\'class2\\'\" /></p>',\n+ components: { Test },\n+ }), (err, result) => {\n+ expect(err).toBeNull()\n+ expect(result).toContain('<p data-server-rendered=\"true\"><span class=\"my-class1 my-class2\">hello world</span></p>')\n+ done()\n+ })\n+ })\n+\n+ it('custom directives on element root of a component', done => {\n+ const Test = {\n+ template: '<span v-class-prefixer=\"\\'my\\'\" class=\"class1\" :class=\"\\'class2\\'\">hello world</span>'\n+ }\n+ const renderer = createRenderer({\n+ directives: {\n+ 'class-prefixer': (node, dir) => {\n+ if (node.data.class) {\n+ node.data.class = `${dir.value}-${node.data.class}`\n+ }\n+ if (node.data.staticClass) {\n+ node.data.staticClass = `${dir.value}-${node.data.staticClass}`\n+ }\n+ }\n+ }\n+ })\n+ renderer.renderToString(new Vue({\n+ template: '<p><Test /></p>',\n+ components: { Test },\n+ }), (err, result) => {\n+ expect(err).toBeNull()\n+ expect(result).toContain('<p data-server-rendered=\"true\"><span class=\"my-class1 my-class2\">hello world</span></p>')\n+ done()\n+ })\n+ })\n+\n+ it('custom directives on element with parent element', done => {\n+ const renderer = createRenderer({\n+ directives: {\n+ 'class-prefixer': (node, dir) => {\n+ if (node.data.class) {\n+ node.data.class = `${dir.value}-${node.data.class}`\n+ }\n+ if (node.data.staticClass) {\n+ node.data.staticClass = `${dir.value}-${node.data.staticClass}`\n+ }\n+ }\n+ }\n+ })\n+ renderer.renderToString(new Vue({\n+ template: '<p><span v-class-prefixer=\"\\'my\\'\" class=\"class1\" :class=\"\\'class2\\'\">hello world</span></p>',\n+ }), (err, result) => {\n+ expect(err).toBeNull()\n+ expect(result).toContain('<p data-server-rendered=\"true\"><span class=\"my-class1 
my-class2\">hello world</span></p>')\n+ done()\n+ })\n+ })\n+\n it('should not warn for custom directives that do not have server-side implementation', done => {\n renderToString(new Vue({\n directives: {", "filename": "test/ssr/ssr-string.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/b71adnym/2/](https://jsfiddle.net/b71adnym/2/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nNest a component with an inline-template that has a ref in a v-for.\r\n\r\n\r\n### What is expected?\r\ncomponent.$ref.ref should point to the respective element.\r\n\r\n### What is actually happening?\r\ncomponent.$ref.ref is a singleton array with the element reference.\r\n\r\n\r\n---\r\ncheckInFor loops over the element ancestors up to the document root, so it believes that the ref is in a loop, but it actually isn't.\r\nThe check should stop at the component root.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "A patch like this seems to fix the issue for me:\r\n```\r\n function checkInFor(el) {\r\n var parent = el;\r\n while (parent) {\r\n if (parent.attrsMap['inline-template'] != null) {\r\n return false\r\n }\r\n if (parent.for !== undefined) {\r\n return true\r\n }\r\n parent = parent.parent;\r\n }\r\n return false\r\n }\r\n```\r\n", "created_at": "2020-03-26T15:20:08Z" } ], "number": 11248, "title": "\"refInFor\" searches upwards of inline-template boundary" }
{ "body": "fix #11248\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 11284, "review_comments": [], "title": "fix(parser): \"checkInFor\" should search up to inline-template" }
{ "commits": [ { "message": "fix(parser): \"checkInFor\" should search up to inline-template" } ], "files": [ { "diff": "@@ -895,9 +895,8 @@ function processAttrs (el) {\n function checkInFor (el: ASTElement): boolean {\n let parent = el\n while (parent) {\n- if (parent.for !== undefined) {\n- return true\n- }\n+ if (parent.attrsMap['inline-template'] != null) return false;\n+ if (parent.for !== undefined) return true;\n parent = parent.parent\n }\n return false", "filename": "src/compiler/parser/index.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/embed/heuristic-brown-u7rvw?fontsize=14&hidenavigation=1&theme=dark](https://codesandbox.io/embed/heuristic-brown-u7rvw?fontsize=14&hidenavigation=1&theme=dark)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nClick the tick box.\r\n\r\n### What is expected?\r\nIt should say \"BLUE BOX\" with a blue background colour.\r\n\r\n### What is actually happening?\r\nIt says \"BLUE BOX\" with no background colour.\r\n\r\n---\r\nThe issue stems from the data-v-* attribute not being re-set for the blue_block div when it is swapped to. It is still using the data-v-* attribute of the child div in the functional component. Thus the CSS class styling does not apply. \r\n\r\n\r\nhttps://imgur.com/a/gukDePJ\r\n\r\n\r\nSwapping from a functional component to something else is useful. E.g. When showing a simple loading bar component before swapping to a more complex component.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I think this may have something to do with how Vue recycles nodes during update. As a workaround, adding a `key` to both the component and the div yields the expected behavior.", "created_at": "2020-03-02T18:15:38Z" }, { "body": "Might be related to https://github.com/vuejs/vue/issues/10416", "created_at": "2020-03-03T17:06:04Z" }, { "body": "test", "created_at": "2020-04-10T07:22:12Z" }, { "body": "I don't think this is a problem.\r\n\r\nAccording to the comment [here](https://github.com/vuejs/vue/blob/dev/test/unit/features/options/_scopeId.spec.js#L92) :\r\n\r\n> functional component with scopeId will not inherit parent scopeId\r\n\r\nThe functional components **should not** inherit parent if the parent is scoped.\r\n\r\nI think we have a workarounds here: we can remove the `scoped` attribute in the parent component. Seems it works :\r\nhttps://codesandbox.io/embed/vigorous-frost-t1f84", "created_at": "2020-12-25T02:33:51Z" } ], "number": 11171, "title": "Swapping from functional to non-functional component using v-if does not change v-data-* attribute." }
{ "body": "fix functional component update scope attribute when patch use same node\r\n\r\nfix #11171\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 11247, "review_comments": [], "title": "fix(patch.js): fix functional component scope attribute update" }
{ "commits": [ { "message": "fix(patch.js): fix functional component scope attribute update\n\nfix functional component update scope attribute when patch use same node\n\nfix #11171" } ], "files": [ { "diff": "@@ -547,6 +547,9 @@ export function createPatchFunction (backend) {\n \n const oldCh = oldVnode.children\n const ch = vnode.children\n+ if (isDef(vnode.fnScopeId)) {\n+ setScope(vnode)\n+ }\n if (isDef(data) && isPatchable(vnode)) {\n for (i = 0; i < cbs.update.length; ++i) cbs.update[i](oldVnode, vnode)\n if (isDef(i = data.hook) && isDef(i = i.update)) i(oldVnode, vnode)", "filename": "src/core/vdom/patch.js", "status": "modified" }, { "diff": "@@ -1,4 +1,5 @@\n import Vue from 'vue'\n+import { patch } from 'web/runtime/patch'\n \n describe('vdom patch: edge cases', () => {\n // exposed by #3406\n@@ -432,4 +433,25 @@ describe('vdom patch: edge cases', () => {\n expect(vm.$el.textContent).not.toMatch('Infinity')\n }).then(done)\n })\n+\n+ // #11171\n+ it(\"should replace functional element scope attribute\", () => {\n+ const vm = new Vue({\n+ components: {\n+ foo: {\n+ functional: true,\n+ _scopeId: \"foo\",\n+ render (h) {\n+ return h('div')\n+ }\n+ }\n+ }\n+ })\n+ const h = vm.$createElement\n+ const vnode = h('foo')\n+ const oldVnode = h(\"div\")\n+ patch(null, oldVnode)\n+ let elm = patch(oldVnode, vnode)\n+ expect(elm.hasAttribute(\"foo\")).toBe(true)\n+ })\n })", "filename": "test/unit/modules/vdom/patch/edge-cases.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/codesandbox-nuxt-vdcv8](https://codesandbox.io/s/codesandbox-nuxt-vdcv8)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nView page source\r\n\r\n### What is expected?\r\n```html\r\n<textarea id=\"input-62\" rows=\"5\"></textarea>\r\n```\r\n\r\n### What is actually happening?\r\n```html\r\n<textarea id=\"input-62\" rows=\"5\">null</textarea>\r\n```\r\n\r\n---\r\nSimilar to #9231\r\n\r\nRepro in `vue/test/ssr/ssr-string.spec.js`:\r\n```js\r\n it('falsy domProps value', done => {\r\n renderVmWithOptions({\r\n render (h) {\r\n return h('div', [\r\n h('textarea', {\r\n domProps: {\r\n value: null\r\n }\r\n })\r\n ])\r\n }\r\n }, result => {\r\n expect(result).toContain(\r\n '<div data-server-rendered=\"true\"><textarea></textarea></div>'\r\n )\r\n done()\r\n })\r\n })\r\n```\r\n\r\nRelevant vuetify code:\r\nhttps://github.com/vuetifyjs/vuetify/blob/243a7c34a1c58dff3753ad35dded13ba5002c8eb/packages/vuetify/src/components/VTextarea/VTextarea.ts#L86-L92\r\nhttps://github.com/vuetifyjs/vuetify/blob/243a7c34a1c58dff3753ad35dded13ba5002c8eb/packages/vuetify/src/components/VTextField/VTextField.ts#L357-L361\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Are you sure this comes from vue or that it hasn't been fixed already but not released? I added your test to the test suite and it passes", "created_at": "2019-11-07T09:02:26Z" }, { "body": "Yep\r\n\r\nCould it be a node version thing (seems unlikely)? I'm on 12.5.0\r\n\r\n```\r\n~/Documents/vuejs/vue dev*\r\n❯ git rev-parse HEAD \r\n\r\n4821149b8bbd4650b1d9c9c3cfbb539ac1e24589\r\n\r\n~/Documents/vuejs/vue dev*\r\n❯ git --no-pager diff\r\ndiff --git a/test/ssr/ssr-string.spec.js b/test/ssr/ssr-string.spec.js\r\nindex e18ca2ae..7dbb1ffb 100644\r\n--- a/test/ssr/ssr-string.spec.js\r\n+++ b/test/ssr/ssr-string.spec.js\r\n@@ -4,6 +4,25 @@ import { createRenderer } from '../../packages/vue-server-renderer'\r\n const { renderToString } = createRenderer()\r\n \r\n describe('SSR: renderToString', () => {\r\n+ it('falsy domProps value', done => {\r\n+ renderVmWithOptions({\r\n+ render (h) {\r\n+ return h('div', [\r\n+ h('textarea', {\r\n+ domProps: {\r\n+ value: null\r\n+ }\r\n+ })\r\n+ ])\r\n+ }\r\n+ }, result => {\r\n+ expect(result).toContain(\r\n+ '<div data-server-rendered=\"true\"><textarea></textarea></div>'\r\n+ )\r\n+ done()\r\n+ })\r\n+ })\r\n+\r\n it('static attributes', done => {\r\n renderVmWithOptions({\r\n template: '<div id=\"foo\" bar=\"123\"></div>'\r\n\r\n~/Documents/vuejs/vue dev*\r\n❯ yarn test:ssr\r\nyarn run v1.19.1\r\n$ npm run build:ssr && jasmine JASMINE_CONFIG_PATH=test/ssr/jasmine.js\r\nnpm WARN lifecycle The node binary used for scripts is /tmp/yarn--1573128640608-0.24686075565520493/node but npm is using /home/kael/.nvm/versions/node/v12.5.0/bin/node itself. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.\r\n\r\n> vue@2.6.10 build:ssr /home/kael/Documents/vuejs/vue\r\n> npm run build -- web-runtime-cjs,web-server-renderer\r\n\r\nnpm WARN lifecycle The node binary used for scripts is /tmp/yarn--1573128640608-0.24686075565520493/node but npm is using /home/kael/.nvm/versions/node/v12.5.0/bin/node itself. 
Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.\r\n\r\n> vue@2.6.10 build /home/kael/Documents/vuejs/vue\r\n> node scripts/build.js \"web-runtime-cjs,web-server-renderer\"\r\n\r\ndist/vue.runtime.common.dev.js 217.93kb\r\ndist/vue.runtime.common.prod.js 63.19kb (gzipped: 22.83kb)\r\npackages/vue-server-renderer/build.dev.js 247.54kb\r\npackages/vue-server-renderer/build.prod.js 80.21kb (gzipped: 29.28kb)\r\npackages/vue-server-renderer/basic.js 334.34kb\r\npackages/vue-server-renderer/server-plugin.js 2.91kb\r\npackages/vue-server-renderer/client-plugin.js 3.03kb\r\nBrowserslist: caniuse-lite is outdated. Please run next command `yarn upgrade caniuse-lite browserslist`\r\nStarted\r\n........................................F................................................................................................................\r\n\r\nFailures:\r\n1) SSR: renderToString falsy domProps value\r\n Message:\r\n Expected '<div data-server-rendered=\"true\"><textarea>null</textarea></div>' to contain '<div data-server-rendered=\"true\"><textarea></textarea></div>'.\r\n Stack:\r\n Error: Expected '<div data-server-rendered=\"true\"><textarea>null</textarea></div>' to contain '<div data-server-rendered=\"true\"><textarea></textarea></div>'.\r\n at toContain (/home/kael/Documents/vuejs/vue/test/ssr/ssr-string.spec.js:19:22)\r\n at cb (/home/kael/Documents/vuejs/vue/test/ssr/ssr-string.spec.js:1621:5)\r\n at RenderContext.cb [as done] (/home/kael/Documents/vuejs/vue/packages/vue-server-renderer/build.dev.js:9205:13)\r\n at RenderContext.done (/home/kael/Documents/vuejs/vue/packages/vue-server-renderer/build.dev.js:2588:19)\r\n\r\n153 specs, 1 failure\r\nFinished in 8.334 seconds\r\n\r\nerror Command failed with exit code 1.\r\ninfo Visit https://yarnpkg.com/en/docs/cli/run for documentation about this command.\r\n```", "created_at": "2019-11-07T12:13:28Z" }, { "body": "I was running the unit test suite, I forgot we have different script for that 🤦‍♂️", "created_at": "2019-11-08T08:02:42Z" }, { "body": "@posva Hi! I sent a pull request for this issue. Could you review it for me? Please, feel free to assign it to anybody else if you want to. Thanks!", "created_at": "2020-02-18T19:10:36Z" } ], "number": 10803, "title": "SSR: textarea domProps keeps falsy values" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nCloses #10803 ", "number": 11121, "review_comments": [], "title": "fix(ssr): textarea keeps undefined/null values" }
{ "commits": [ { "message": "fix(ssr): textarea keeps undefined/null values" }, { "message": "refactor(ssr): Code review changes for null values in textareas" } ], "files": [ { "diff": "@@ -2,7 +2,7 @@\n \n import VNode from 'core/vdom/vnode'\n import { renderAttr } from './attrs'\n-import { isDef, isUndef, extend } from 'shared/util'\n+import { isDef, isUndef, extend, toString } from 'shared/util'\n import { propsToAttrMap, isRenderableAttr } from '../util'\n \n export default function renderDOMProps (node: VNodeWithData): string {\n@@ -28,7 +28,7 @@ export default function renderDOMProps (node: VNodeWithData): string {\n } else if (key === 'textContent') {\n setText(node, props[key], false)\n } else if (key === 'value' && node.tag === 'textarea') {\n- setText(node, props[key], false)\n+ setText(node, toString(props[key]), false)\n } else {\n // $flow-disable-line (WTF?)\n const attr = propsToAttrMap[key] || key.toLowerCase()", "filename": "src/platforms/web/server/modules/dom-props.js", "status": "modified" }, { "diff": "@@ -1594,6 +1594,25 @@ describe('SSR: renderToString', () => {\n \n renderToString(vueInstance, err => done(err))\n })\n+\n+ it('undefined v-model with textarea', done => {\n+ renderVmWithOptions({\n+ render (h) {\n+ return h('div', [\n+ h('textarea', {\n+ domProps: {\n+ value: null\n+ }\n+ })\n+ ])\n+ }\n+ }, result => {\n+ expect(result).toContain(\n+ '<div data-server-rendered=\"true\"><textarea></textarea></div>'\n+ )\n+ done()\n+ })\n+ })\n })\n \n function renderVmWithOptions (options, cb) {", "filename": "test/ssr/ssr-string.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://codepen.io/deqwin/pen/wvaMxoO](https://codepen.io/deqwin/pen/wvaMxoO)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nopen the reproduction above and you will see a red vue warning in the console\r\n\r\n### What is expected?\r\na successful hydration\r\n\r\n### What is actually happening?\r\nthe hydration fails and the app re-renders absolutely\r\n\r\n---\r\nthis looks like the problem caused by the empty text node (src/core/vdom/patch.js line:646)\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "hello do well ", "created_at": "2020-03-04T03:46:23Z" }, { "body": "Hello,Modify it so that there is no error,beacuse,The client-side rendered virtual DOM tree is not matching server-rendered content. You have one more {{value}} .\r\n```html\r\n<div id=\"app\" data-server-rendered=\"true\"><span>123</span></div>\r\n<template id=\"tpl\">\r\n <div id=\"app\"><span>123</span></div>\r\n</template>\r\n```", "created_at": "2020-04-08T03:05:34Z" }, { "body": "> Hello,Modify it so that there is no error,beacuse,The client-side rendered virtual DOM tree is not matching server-rendered content. You have one more {{value}} .\r\n> \r\n> ```\r\n> <div id=\"app\" data-server-rendered=\"true\"><span>123</span></div>\r\n> <template id=\"tpl\">\r\n> <div id=\"app\"><span>123</span></div>\r\n> </template>\r\n> ```\r\n\r\nYes, it works. But it's not easy to modify because i have written this many times in my project.", "created_at": "2020-04-13T02:01:26Z" } ], "number": 11109, "title": "Hydration fails when write like \"<div><span>123</span>{{value}}</div>\" in the template and the data \"value\" is a empty string" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nCloses #11109 ", "number": 11111, "review_comments": [ { "body": "```suggestion\r\n it('should not fail hydration with empty text vnodes children', () => {\r\n```", "created_at": "2020-02-15T15:57:18Z" }, { "body": "This results in child.elm === undefined and causes a crash in next patch.\r\nReproduction: https://github.com/deqwin/vue/compare/fix_ssr_hydration...contribu:fix_ssr_hydration_crash\r\n\r\nI think it is better to treat empty string nodes like https://github.com/vuejs/vue/issues/5117", "created_at": "2020-02-15T16:49:54Z" }, { "body": "Thanks for your suggestions.", "created_at": "2020-02-16T02:19:28Z" }, { "body": "The fact is not that the vnode tree has an extra empty text node, but the DOM tree is missing a necessary empty text node.", "created_at": "2020-02-16T02:22:20Z" }, { "body": "@posva I have modified the code to deal with this problem and also updated the test.", "created_at": "2020-02-16T03:38:21Z" }, { "body": "Thanks!\r\nI am unsure if this causes reflow during hydration and if the existing hydration code allows it.\r\nI think it shouldn't cause reflow if the existing hydration code disallows reflow.", "created_at": "2020-02-16T04:17:25Z" }, { "body": "Yeah, it‘s worth further studying. However, it is necessary to keep the vnode structure and DOM structure consistent at all times. So I think it's ok to do this step here.", "created_at": "2020-02-16T08:43:17Z" }, { "body": "Another solution is rendering empty text nodes as comment nodes in SSR like async placeholder or v-if=\"false\".\r\nI am unsure which solution is better.", "created_at": "2020-02-19T10:58:34Z" } ], "title": "fix(ssr): ignore empty text vnode when hydrating (fix #11109)" }
{ "commits": [ { "message": "fix(ssr): ignore empty text vnode when hydrating" }, { "message": "Update test/unit/modules/vdom/patch/hydration.spec.js\n\nCo-Authored-By: Eduardo San Martin Morote <posva@users.noreply.github.com>" }, { "message": "fix(ssr): ignore empty text vnode but create a empty textNode for next patch when hydrating" } ], "files": [ { "diff": "@@ -23,6 +23,7 @@ import {\n isDef,\n isUndef,\n isTrue,\n+ isFalse,\n makeMap,\n isRegExp,\n isPrimitive\n@@ -643,7 +644,13 @@ export function createPatchFunction (backend) {\n let childrenMatch = true\n let childNode = elm.firstChild\n for (let i = 0; i < children.length; i++) {\n- if (!childNode || !hydrate(childNode, children[i], insertedVnodeQueue, inVPre)) {\n+ const child = children[i]\n+ // ignore empty text vnode but create a empty textNode for next patch\n+ if (isUndef(child.tag) && isFalse(child.isComment) && child.text === '') {\n+ createElm(child, insertedVnodeQueue, elm, childNode)\n+ continue\n+ }\n+ if (!childNode || !hydrate(childNode, child, insertedVnodeQueue, inVPre)) {\n childrenMatch = false\n break\n }", "filename": "src/core/vdom/patch.js", "status": "modified" }, { "diff": "@@ -388,4 +388,24 @@ describe('vdom patch: hydration', () => {\n expect(dom.children[0].className).toBe('bar')\n }).then(done)\n })\n+\n+ // #11109\n+ it('should not fail hydration with empty text vnodes children', done => {\n+ const dom = createMockSSRDOM('<div class=\"bar\"><span>bar</span></div>')\n+\n+ const vm = new Vue({\n+ data: {\n+ a: ''\n+ },\n+ template: `<div><div class=\"bar\"><span>bar</span>{{a}}</div></div>`\n+ }).$mount(dom)\n+\n+ expect('not matching server-rendered content').not.toHaveBeenWarned()\n+\n+ // should update\n+ vm.a = 'foo'\n+ waitForUpdate(() => {\n+ expect(dom.children[0].innerHTML).toBe('<span>bar</span>foo')\n+ }).then(done)\n+ })\n })", "filename": "test/unit/modules/vdom/patch/hydration.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.11\r\n\r\n### Reproduction link\r\n[https://gist.github.com/aimozg/c073eea5a55062e3dcd8f887a8567807](https://gist.github.com/aimozg/c073eea5a55062e3dcd8f887a8567807)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nRun example.js in Node.\r\n\r\nAlternative steps/Detailed explanation:\r\n1. Setup Vue template compiler with `whitespace: 'condense'`. (it is default for vue-cli).\r\n2. Render template with `&nbsp;` entity, e.g. `<p>a&nbsp;b</p>`\r\n3. Inspect template for actual non-breaking space character\r\n\r\n### What is expected?\r\n`&nbsp;` entity rendered as non-breaking space character; example.js output should contain\r\n```\r\n---- whitespace: condense\r\n// code\r\nNBSP FOUND\r\n```\r\n\r\n### What is actually happening?\r\n`&nbsp;` entity is rendered as plain space, example.js output contains\r\n```\r\n---- whitespace: condense\r\n// code\r\nNBSP NOT FOUND\r\n```\r\n\r\n---\r\nThe whitespace is stripped by this compiler code:\r\n\r\n```js\r\nif (!inPre && whitespaceOption === 'condense') {\r\n // condense consecutive whitespaces into single space\r\n text = text.replace(whitespaceRE, ' ')\r\n}\r\n```\r\n\r\nand `\"&nbsp;\"`s are replaced by `\"\\xA0\"`s with entity decoder earlier.\r\n\r\nRelated issues is https://github.com/vuejs/vue/issues/10485\r\n\r\n**Suggestion**:\r\n\r\nNarrower whitespaceRE regexp, like `/[ \\t\\r\\n]+/g`.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Shouldn't we not touch non-breakable spaces in general? That's what I would personally expect if I use one in my template. It would also fix the issue you mentioned", "created_at": "2020-01-29T10:31:17Z" }, { "body": "> Setup Vue template compiler with whitespace: 'condense'. **(it is default for vue-cli)**.\r\n\r\nWait, I'm confused. Is `condense` really the default? I'm seeing multiple things that claim `preserve` is actually the default.\r\n\r\nI've been struggling to figure out why none of my non-breakable spaces are working. I was going to try forcing preserve, but then saw: \r\n\r\nEvan saying default is `preserve` https://github.com/vuejs/vue/issues/9208#issuecomment-450012518\r\n\r\nAnd compiler option docs that echo that statement https://github.com/vuejs/vue/tree/dev/packages/vue-template-compiler#options\r\n\r\nNow after seeing @aimozg say `condense` is actually default, I went ahead and manually set `preserve` and sure enough! Everything's back to normal!\r\n\r\nAre the docs/implementation wrong?\r\n\r\nEither way, why would condense eliminate nbsp? Very unexpected behavior. I agree they shouldn't be touched.\r\n", "created_at": "2020-04-30T20:50:02Z" }, { "body": "White spaces are preserved in Vue core to ensure backward compatibility while in latest versions of Vue CLI we configure that to `condense` by default.", "created_at": "2020-05-01T02:07:26Z" }, { "body": "same with escaped tab `&#9;`\r\n\r\nMakes very hard to display the code", "created_at": "2020-05-24T10:35:52Z" }, { "body": "Waiting for this to be merged..\r\nCurrent workaround\r\nUse\r\n```\r\n{{'\\xa0'}}\r\n```\r\ninstead of \r\n```\r\n&nbsp;\r\n```", "created_at": "2020-09-16T11:55:42Z" } ], "number": 11059, "title": "With `whitespace:condense` all &nbsps;, even meaningful, are converted to spaces" }
{ "body": "The regex for whitespace was too strict and was causing $nbps; chars to\r\ndisappear from templates when `whitespace: 'condense'` is set. Changing the rule\r\nto avoid converting non-breaking white space chars into regular spaces.\r\n\r\nFixes #10485\r\nFixes #11059\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n", "number": 11065, "review_comments": [ { "body": "Maybe we should use *space characters* defined in HTML spec [here](https://www.w3.org/TR/html52/infrastructure.html#space-characters)?", "created_at": "2020-02-02T17:24:41Z" }, { "body": "Yeah, thats a good idea. The only possible char missing from the regex at this point would be `U+000C FORM FEED (FF)`", "created_at": "2020-02-03T15:23:34Z" } ], "title": "fix(compiler): avoid converting &nbps; to spaces" }
{ "commits": [ { "message": "fix(compiler): avoid converting &nbps; to spaces (fix #11059)\n\nThe regex for whitespace was too strict and was causing $nbps; chars to\ndisappear from templates when `whitespace: 'condense'` is set. Changing the rule\nto avoid converting non-breaking white space chars into regular spaces." }, { "message": "perf(compiler): include form-feed char as part of whitespace regex" } ], "files": [ { "diff": "@@ -38,7 +38,7 @@ const modifierRE = /\\.[^.\\]]+(?=[^\\]]*$)/g\n const slotRE = /^v-slot(:|$)|^#/\n \n const lineBreakRE = /[\\r\\n]/\n-const whitespaceRE = /\\s+/g\n+const whitespaceRE = /[ \\f\\t\\r\\n]+/g\n \n const invalidAttributeRE = /[\\s\"'<>\\/=]/\n ", "filename": "src/compiler/parser/index.js", "status": "modified" }, { "diff": "@@ -845,6 +845,14 @@ describe('parser', () => {\n expect(ast.children[4].children[0].text).toBe('. Have fun! ')\n })\n \n+ it(`maintains &nbsp; with whitespace: 'condense'`, () => {\n+ const options = extend({}, condenseOptions)\n+ const ast = parse('<span>&nbsp;</span>', options)\n+ const code = ast.children[0]\n+ expect(code.type).toBe(3)\n+ expect(code.text).toBe('\\xA0')\n+ })\n+\n it(`preserve whitespace in <pre> tag with whitespace: 'condense'`, function () {\n const options = extend({}, condenseOptions)\n const ast = parse('<pre><code> \\n<span>hi</span>\\n </code><span> </span></pre>', options)", "filename": "test/unit/modules/compiler/parser.spec.js", "status": "modified" } ] }
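The core of the fix above is narrowing the condense regex: in JavaScript, `\s` also matches U+00A0 (the character `&nbsp;` decodes to), so condensing with `\s+` silently turned non-breaking spaces into plain spaces. A quick standalone comparison of the old and new character classes, independent of the compiler:

```js
const broad = /\s+/g            // old rule: \s matches U+00A0 (no-break space) too
const narrow = /[ \f\t\r\n]+/g  // new rule: ASCII whitespace only

const text = 'a\u00A0b   c\n d' // "a&nbsp;b" followed by collapsible spaces and a newline

console.log(text.replace(broad, ' '))                     // 'a b c d'      -- the &nbsp; is condensed away
console.log(text.replace(narrow, ' '))                    // 'a\u00A0b c d' -- the &nbsp; survives
console.log(text.replace(broad, ' ').includes('\u00A0'))  // false
console.log(text.replace(narrow, ' ').includes('\u00A0')) // true
```

Consecutive spaces, tabs, and newlines are still collapsed, which is the whole point of `whitespace: 'condense'`; only the semantically meaningful non-breaking space is left alone.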
{ "body": "### Version\r\n4.0.0-rc.1\r\n\r\n### Reproduction link\r\n[https://github.com/tobyzerner/vue-cli-nbsp-bug](https://github.com/tobyzerner/vue-cli-nbsp-bug)\r\n\r\n\r\n\r\n\r\n\r\n### Environment info\r\n```\r\nEnvironment Info:\r\n System:\r\n OS: macOS 10.14.6\r\n CPU: (4) x64 Intel(R) Core(TM) i5-7360U CPU @ 2.30GHz\r\n Binaries:\r\n Node: 10.15.3 - /usr/local/bin/node\r\n Yarn: 1.17.3 - ~/.npm-global/bin/yarn\r\n npm: 6.11.3 - ~/.npm-global/bin/npm\r\n Browsers:\r\n Chrome: 76.0.3809.132\r\n Firefox: 68.0.2\r\n Safari: 12.1.2\r\n npmPackages:\r\n @vue/babel-helper-vue-jsx-merge-props: 1.0.0\r\n @vue/babel-plugin-transform-vue-jsx: 1.0.0\r\n @vue/babel-preset-app: 4.0.0-rc.1\r\n @vue/babel-preset-jsx: 1.1.0\r\n @vue/babel-sugar-functional-vue: 1.0.0\r\n @vue/babel-sugar-inject-h: 1.0.0\r\n @vue/babel-sugar-v-model: 1.0.0\r\n @vue/babel-sugar-v-on: 1.1.0\r\n @vue/cli-overlay: 4.0.0-rc.1\r\n @vue/cli-plugin-babel: ^4.0.0-rc.1 => 4.0.0-rc.1\r\n @vue/cli-plugin-eslint: ^4.0.0-rc.1 => 4.0.0-rc.1\r\n @vue/cli-service: ^4.0.0-rc.1 => 4.0.0-rc.1\r\n @vue/cli-shared-utils: 4.0.0-rc.1\r\n @vue/component-compiler-utils: 3.0.0\r\n @vue/preload-webpack-plugin: 1.1.1\r\n @vue/web-component-wrapper: 1.2.0\r\n eslint-plugin-vue: ^5.0.0 => 5.2.3\r\n vue: ^2.6.10 => 2.6.10\r\n vue-eslint-parser: 5.0.0\r\n vue-hot-reload-api: 2.3.3\r\n vue-loader: 15.7.1\r\n vue-style-loader: 4.1.2\r\n vue-template-compiler: ^2.6.10 => 2.6.10\r\n vue-template-es2015-compiler: 1.9.1\r\n npmGlobalPackages:\r\n @vue/cli: 4.0.0-rc.1\r\n```\r\n\r\n\r\n### Steps to reproduce\r\nClone the repo, `npm install && npm run serve`\r\n\r\n### What is expected?\r\nThe template contains a span containing a `&nbsp;` character. This span has a red background. Thus you should see a small red box on the page.\r\n\r\n### What is actually happening?\r\nThe `&nbsp;` in the template is being ignored and converted into a normal space, which means the span element is not displayed.\r\n\r\n---\r\nI cannot replicate this bug in a plain Vue jsfiddle, nor a project using vue-loader without vue-cli. Thus I think something in vue-cli may be the culprit rather than vue-template-compiler or Vue itself.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I've transferred this issue to the `vue` repository because I believe it's a bug introduced in https://github.com/vuejs/vue/commit/e1abedb9e66b21da8a7e93e175b9dabe334dfebd\r\n\r\nSee another bug report at https://github.com/vuejs/vue/issues/9208#issuecomment-477811927\r\n\r\nCurrent workaround:\r\n\r\n```js\r\n// vue.config.js\r\nmodule.exports = {\r\n chainWebpack: config => {\r\n config.module\r\n .rule('vue')\r\n .use('vue-loader')\r\n .tap(args => {\r\n args.compilerOptions.whitespace = 'preserve'\r\n })\r\n }\r\n}\r\n```", "created_at": "2019-09-06T06:33:09Z" }, { "body": "Would it be possible to ignore hairspaces (`&hairsp;`) as well..?\r\n\r\n_edit: sorry, didn't see the issue link_", "created_at": "2020-03-31T19:30:02Z" }, { "body": "@naton as mentioned, this should be discussed in the vue repo. Sodatea linked the issue", "created_at": "2020-03-31T19:36:48Z" }, { "body": "@LinusBorg isn't this the Vue repo? The linked issue is the original change to add 'condense' is it not? It is also closed.\r\n\r\nWe just ran into this today and are trying to figure out how to work around it. 
We've built in some localization automation that assumes the 'condense' setting (based on Evan's suggestion that this would be default in 3.x and because I agree with the condense behaviour). So it makes it difficult to just swap back to preserve without potential side effects.", "created_at": "2020-04-09T21:25:40Z" }, { "body": "Ok it seems that simply not including the nbsp; inside an element by itself solves the issue. Adding this as a note for anyone else who runs into this.", "created_at": "2020-04-09T21:27:34Z" }, { "body": "> I've transferred this issue to the `vue` repository because I believe it's a bug introduced in [e1abedb](https://github.com/vuejs/vue/commit/e1abedb9e66b21da8a7e93e175b9dabe334dfebd)\r\n> \r\n> See another bug report at [#9208 (comment)](https://github.com/vuejs/vue/issues/9208#issuecomment-477811927)\r\n> \r\n> Current workaround:\r\n> \r\n> ```js\r\n> // vue.config.js\r\n> module.exports = {\r\n> chainWebpack: config => {\r\n> config.module\r\n> .rule('vue')\r\n> .use('vue-loader')\r\n> .tap(args => {\r\n> args.compilerOptions.whitespace = 'preserve'\r\n> })\r\n> }\r\n> }\r\n> ```\r\n\r\nThis doesn't seem to work anymore with Vue 3, it throws the error: \"TypeError: Cannot set property 'whitespace' of undefined\". Any workaround for Vue 3?", "created_at": "2020-09-29T02:33:58Z" } ], "number": 10485, "title": "With `whitespace: 'condense'`, elements contain only &nbsp; and spaces are condensed into empty tags" }
{ "body": "The regex for whitespace was too strict and was causing $nbps; chars to\r\ndisappear from templates when `whitespace: 'condense'` is set. Changing the rule\r\nto avoid converting non-breaking white space chars into regular spaces.\r\n\r\nFixes #10485\r\nFixes #11059\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n", "number": 11065, "review_comments": [ { "body": "Maybe we should use *space characters* defined in HTML spec [here](https://www.w3.org/TR/html52/infrastructure.html#space-characters)?", "created_at": "2020-02-02T17:24:41Z" }, { "body": "Yeah, thats a good idea. The only possible char missing from the regex at this point would be `U+000C FORM FEED (FF)`", "created_at": "2020-02-03T15:23:34Z" } ], "title": "fix(compiler): avoid converting &nbps; to spaces" }
{ "commits": [ { "message": "fix(compiler): avoid converting &nbps; to spaces (fix #11059)\n\nThe regex for whitespace was too strict and was causing $nbps; chars to\ndisappear from templates when `whitespace: 'condense'` is set. Changing the rule\nto avoid converting non-breaking white space chars into regular spaces." }, { "message": "perf(compiler): include form-feed char as part of whitespace regex" } ], "files": [ { "diff": "@@ -38,7 +38,7 @@ const modifierRE = /\\.[^.\\]]+(?=[^\\]]*$)/g\n const slotRE = /^v-slot(:|$)|^#/\n \n const lineBreakRE = /[\\r\\n]/\n-const whitespaceRE = /\\s+/g\n+const whitespaceRE = /[ \\f\\t\\r\\n]+/g\n \n const invalidAttributeRE = /[\\s\"'<>\\/=]/\n ", "filename": "src/compiler/parser/index.js", "status": "modified" }, { "diff": "@@ -845,6 +845,14 @@ describe('parser', () => {\n expect(ast.children[4].children[0].text).toBe('. Have fun! ')\n })\n \n+ it(`maintains &nbsp; with whitespace: 'condense'`, () => {\n+ const options = extend({}, condenseOptions)\n+ const ast = parse('<span>&nbsp;</span>', options)\n+ const code = ast.children[0]\n+ expect(code.type).toBe(3)\n+ expect(code.text).toBe('\\xA0')\n+ })\n+\n it(`preserve whitespace in <pre> tag with whitespace: 'condense'`, function () {\n const options = extend({}, condenseOptions)\n const ast = parse('<pre><code> \\n<span>hi</span>\\n </code><span> </span></pre>', options)", "filename": "test/unit/modules/compiler/parser.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/50wL7mdz/30115/](https://jsfiddle.net/50wL7mdz/30115/)\r\n\r\n### Steps to reproduce\r\nAdd a modifier to one of the event listeners. e.g. @emission.once\r\n\r\n### What is expected?\r\nThat all arguments will be passed to the function\r\n\r\n### What is actually happening?\r\nOnly the first argument is passed\r\n\r\n---\r\nMy guess is that it lies in the template compiler which creates a function($event){...} and calling the handler with $event instead of ...arguments?\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "As a workaround you can use this version\r\n\r\n```js\r\n <component @emission.once=\"hearEmission('extra', ...arguments)\">with extras, incomplete data</component>\r\n```\r\n\r\nThe generated js code should be different: https://template-explorer.vuejs.org/#%3Cdiv%20id%3D%22app%22%3E%0A%20%20%3Ccomponent%20%40emission.once%3D%22hearEmission%22%3Ecomplete%20data%2C%20without%20extras%3C%2Fcomponent%3E%0A%20%20%3Ccomponent%20%40emission.once%3D%22hearEmission('extra'%2C%20...arguments)%22%3Ewith%20extras%2C%20incomplete%20data%3C%2Fcomponent%3E%0A%20%20%3Ccomponent%20%40emission.once%3D%22function%20(a%2C%20b%2C%20c)%20%7B%20hearEmission('extra'%2C%20a%2C%20b%2C%20c)%20%7D%22%3Eexpected%2C%20overly%20explicit%3C%2Fcomponent%3E%0A%20%20%0A%3C%2Fdiv%3E\r\n\r\nThe first one should be:\r\n\r\n```js\r\non: {\r\n \"~emission\": hearEmission\r\n }\r\n```\r\n\r\nThe second one shouldn't change and the last one might be a bit more complicated as there should be quite a few edge cases but if we use an `apply` we should be able to keep a very similar version", "created_at": "2019-11-26T11:24:36Z" }, { "body": "Thanks for the quick response and for the link", "created_at": "2019-11-27T13:09:33Z" } ], "number": 10867, "title": "Multiple arguments in custom event with a modifier are not passed to handler" }
{ "body": "Pass full parent argument list to the event handler when there are event\r\nmodifiers present.\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nFix #10867 ", "number": 10958, "review_comments": [], "title": "fix(compiler): event handler with modifiers swallowing arguments" }
{ "commits": [ { "message": "fix(compiler): event handlers with modifiers swallowing arguments (fix #10867)\n\nPass full parent argument list to the event handler when there are event\nmodifiers present." } ], "files": [ { "diff": "@@ -148,9 +148,9 @@ function genHandler (handler: ASTElementHandler | Array<ASTElementHandler>): str\n code += genModifierCode\n }\n const handlerCode = isMethodPath\n- ? `return ${handler.value}($event)`\n+ ? `return ${handler.value}.apply(null, arguments)`\n : isFunctionExpression\n- ? `return (${handler.value})($event)`\n+ ? `return (${handler.value}).apply(null, arguments)`\n : isFunctionInvocation\n ? `return ${handler.value}`\n : handler.value", "filename": "src/compiler/codegen/events.js", "status": "modified" }, { "diff": "@@ -368,127 +368,127 @@ describe('codegen', () => {\n it('generate events with keycode', () => {\n assertCodegen(\n '<input @input.enter=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\"))return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\"))return null;return onInput.apply(null, arguments)}}})}`\n )\n // multiple keycodes (delete)\n assertCodegen(\n '<input @input.delete=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"delete\",[8,46],$event.key,[\"Backspace\",\"Delete\",\"Del\"]))return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"delete\",[8,46],$event.key,[\"Backspace\",\"Delete\",\"Del\"]))return null;return onInput.apply(null, arguments)}}})}`\n )\n // multiple keycodes (esc)\n assertCodegen(\n '<input @input.esc=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"esc\",27,$event.key,[\"Esc\",\"Escape\"]))return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"esc\",27,$event.key,[\"Esc\",\"Escape\"]))return null;return onInput.apply(null, arguments)}}})}`\n )\n // multiple keycodes (space)\n assertCodegen(\n '<input @input.space=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"space\",32,$event.key,[\" \",\"Spacebar\"]))return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"space\",32,$event.key,[\" \",\"Spacebar\"]))return null;return onInput.apply(null, arguments)}}})}`\n )\n // multiple keycodes (chained)\n assertCodegen(\n '<input @keydown.enter.delete=\"onInput\">',\n- `with(this){return _c('input',{on:{\"keydown\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\")&&_k($event.keyCode,\"delete\",[8,46],$event.key,[\"Backspace\",\"Delete\",\"Del\"]))return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"keydown\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\")&&_k($event.keyCode,\"delete\",[8,46],$event.key,[\"Backspace\",\"Delete\",\"Del\"]))return null;return onInput.apply(null, arguments)}}})}`\n )\n 
// number keycode\n assertCodegen(\n '<input @input.13=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&$event.keyCode!==13)return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&$event.keyCode!==13)return null;return onInput.apply(null, arguments)}}})}`\n )\n // custom keycode\n assertCodegen(\n '<input @input.custom=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"custom\",undefined,$event.key,undefined))return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"custom\",undefined,$event.key,undefined))return null;return onInput.apply(null, arguments)}}})}`\n )\n })\n \n it('generate events with generic modifiers', () => {\n assertCodegen(\n '<input @input.stop=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){$event.stopPropagation();return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){$event.stopPropagation();return onInput.apply(null, arguments)}}})}`\n )\n assertCodegen(\n '<input @input.prevent=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){$event.preventDefault();return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){$event.preventDefault();return onInput.apply(null, arguments)}}})}`\n )\n assertCodegen(\n '<input @input.self=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){if($event.target !== $event.currentTarget)return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){if($event.target !== $event.currentTarget)return null;return onInput.apply(null, arguments)}}})}`\n )\n })\n \n // GitHub Issues #5146\n it('generate events with generic modifiers and keycode correct order', () => {\n assertCodegen(\n '<input @keydown.enter.prevent=\"onInput\">',\n- `with(this){return _c('input',{on:{\"keydown\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\"))return null;$event.preventDefault();return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"keydown\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\"))return null;$event.preventDefault();return onInput.apply(null, arguments)}}})}`\n )\n \n assertCodegen(\n '<input @keydown.enter.stop=\"onInput\">',\n- `with(this){return _c('input',{on:{\"keydown\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\"))return null;$event.stopPropagation();return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"keydown\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\"))return null;$event.stopPropagation();return onInput.apply(null, arguments)}}})}`\n )\n })\n \n it('generate events with mouse event modifiers', () => {\n assertCodegen(\n '<input @click.ctrl=\"onClick\">',\n- `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.ctrlKey)return null;return onClick($event)}}})}`\n+ `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.ctrlKey)return null;return onClick.apply(null, arguments)}}})}`\n )\n assertCodegen(\n '<input @click.shift=\"onClick\">',\n- 
`with(this){return _c('input',{on:{\"click\":function($event){if(!$event.shiftKey)return null;return onClick($event)}}})}`\n+ `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.shiftKey)return null;return onClick.apply(null, arguments)}}})}`\n )\n assertCodegen(\n '<input @click.alt=\"onClick\">',\n- `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.altKey)return null;return onClick($event)}}})}`\n+ `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.altKey)return null;return onClick.apply(null, arguments)}}})}`\n )\n assertCodegen(\n '<input @click.meta=\"onClick\">',\n- `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.metaKey)return null;return onClick($event)}}})}`\n+ `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.metaKey)return null;return onClick.apply(null, arguments)}}})}`\n )\n assertCodegen(\n '<input @click.exact=\"onClick\">',\n- `with(this){return _c('input',{on:{\"click\":function($event){if($event.ctrlKey||$event.shiftKey||$event.altKey||$event.metaKey)return null;return onClick($event)}}})}`\n+ `with(this){return _c('input',{on:{\"click\":function($event){if($event.ctrlKey||$event.shiftKey||$event.altKey||$event.metaKey)return null;return onClick.apply(null, arguments)}}})}`\n )\n assertCodegen(\n '<input @click.ctrl.exact=\"onClick\">',\n- `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.ctrlKey)return null;if($event.shiftKey||$event.altKey||$event.metaKey)return null;return onClick($event)}}})}`\n+ `with(this){return _c('input',{on:{\"click\":function($event){if(!$event.ctrlKey)return null;if($event.shiftKey||$event.altKey||$event.metaKey)return null;return onClick.apply(null, arguments)}}})}`\n )\n })\n \n it('generate events with multiple modifiers', () => {\n assertCodegen(\n '<input @input.stop.prevent.self=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":function($event){$event.stopPropagation();$event.preventDefault();if($event.target !== $event.currentTarget)return null;return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"input\":function($event){$event.stopPropagation();$event.preventDefault();if($event.target !== $event.currentTarget)return null;return onInput.apply(null, arguments)}}})}`\n )\n })\n \n it('generate events with capture modifier', () => {\n assertCodegen(\n '<input @input.capture=\"onInput\">',\n- `with(this){return _c('input',{on:{\"!input\":function($event){return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"!input\":function($event){return onInput.apply(null, arguments)}}})}`\n )\n })\n \n it('generate events with once modifier', () => {\n assertCodegen(\n '<input @input.once=\"onInput\">',\n- `with(this){return _c('input',{on:{\"~input\":function($event){return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"~input\":function($event){return onInput.apply(null, arguments)}}})}`\n )\n })\n \n it('generate events with capture and once modifier', () => {\n assertCodegen(\n '<input @input.capture.once=\"onInput\">',\n- `with(this){return _c('input',{on:{\"~!input\":function($event){return onInput($event)}}})}`\n+ `with(this){return _c('input',{on:{\"~!input\":function($event){return onInput.apply(null, arguments)}}})}`\n )\n })\n \n it('generate events with once and capture modifier', () => {\n assertCodegen(\n '<input @input.once.capture=\"onInput\">',\n- `with(this){return _c('input',{on:{\"~!input\":function($event){return onInput($event)}}})}`\n+ 
`with(this){return _c('input',{on:{\"~!input\":function($event){return onInput.apply(null, arguments)}}})}`\n )\n })\n \n@@ -538,7 +538,7 @@ describe('codegen', () => {\n // with modifiers\n assertCodegen(\n `<input @keyup.enter=\"e=>current++\">`,\n- `with(this){return _c('input',{on:{\"keyup\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\"))return null;return (e=>current++)($event)}}})}`\n+ `with(this){return _c('input',{on:{\"keyup\":function($event){if(!$event.type.indexOf('key')&&_k($event.keyCode,\"enter\",13,$event.key,\"Enter\"))return null;return (e=>current++).apply(null, arguments)}}})}`\n )\n })\n \n@@ -563,7 +563,7 @@ describe('codegen', () => {\n it('generate multiple event handlers', () => {\n assertCodegen(\n '<input @input=\"current++\" @input.stop=\"onInput\">',\n- `with(this){return _c('input',{on:{\"input\":[function($event){current++},function($event){$event.stopPropagation();return onInput($event)}]}})}`\n+ `with(this){return _c('input',{on:{\"input\":[function($event){current++},function($event){$event.stopPropagation();return onInput.apply(null, arguments)}]}})}`\n )\n })\n ", "filename": "test/unit/modules/compiler/codegen.spec.js", "status": "modified" } ] }
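The reason the old codegen dropped data is that `$emit('emission', a, b, c)` invokes the listener with three arguments, while the generated modifier wrapper only forwarded the first one. A plain-JS illustration of the before/after strategies, with a hypothetical handler name standing in for the user method:

```js
// Stand-in for a user method bound as @emission.once="hearEmission"
function hearEmission(...args) {
  return args
}

// What the compiler used to generate for a handler with modifiers:
const oldWrapper = function ($event) {
  return hearEmission($event) // only the first emitted argument survives
}

// What it generates after the fix:
const newWrapper = function ($event) {
  return hearEmission.apply(null, arguments) // forwards every argument
}

// Simulate vm.$emit('emission', 1, 2, 3) calling the generated wrapper:
console.log(oldWrapper(1, 2, 3)) // [1]
console.log(newWrapper(1, 2, 3)) // [1, 2, 3]
```

Using `arguments` keeps the wrapper a plain function expression, which is why the generated code is not an arrow function.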
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://github.com/vuejs/vue/blob/dev/dist/vue.esm.browser.js#L3296](https://github.com/vuejs/vue/blob/dev/dist/vue.esm.browser.js#L3296)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nSee the link https://github.com/vuejs/vue/blob/dev/dist/vue.esm.browser.js#L3296\r\n\r\n### What is expected?\r\nfunction createComponentInstanceForVnode (\r\n vnode, // we know it's MountedComponentVNode but flow doesn't\r\n parent // activeInstance in lifecycle state\r\n) {\r\n...\r\n}\r\n\r\n### What is actually happening?\r\nfunction createComponentInstanceForVnode (\r\n vnode, // we know it's MountedComponentVNode but flow doesn't\r\n parent, // activeInstance in lifecycle state\r\n) {\r\n...\r\n}\r\n\r\n---\r\nI've got an error when trying to add the vue.esm.browser.js file into a bundle using systemjs-builder\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [], "number": 10843, "title": "Unexpected comma in the function declaration" }
{ "body": "Fix #10843\r\n\r\nSince it's not supported on [many browsers](https://caniuse.com/#feat=mdn-javascript_grammar_trailing_commas_trailing_commas_in_functions), we should just remove it\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [x] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 10845, "review_comments": [], "title": "fix(core): remove trailing comma in function signature" }
{ "commits": [ { "message": "fix(core): remove trailing comma in function signature\n\nFix #10843" } ], "files": [ { "diff": "@@ -207,7 +207,7 @@ export function createComponent (\n \n export function createComponentInstanceForVnode (\n vnode: any, // we know it's MountedComponentVNode but flow doesn't\n- parent: any, // activeInstance in lifecycle state\n+ parent: any // activeInstance in lifecycle state\n ): Component {\n const options: InternalComponentOptions = {\n _isComponent: true,", "filename": "src/core/vdom/create-component.js", "status": "modified" } ] }
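For context on why this one-character change matters: trailing commas in function parameter lists are an ES2017 addition, so older parsers (such as the one systemjs-builder used in the report above) reject them with a SyntaxError when they encounter the built `dist` file. A minimal before/after sketch, with the Flow annotations and real body dropped:

```js
// Rejected by pre-ES2017 parsers: trailing comma after the last parameter.
// function createComponentInstanceForVnode (
//   vnode,
//   parent, // <- SyntaxError in older engines and bundlers
// ) { /* ... */ }

// Accepted everywhere:
function createComponentInstanceForVnode (vnode, parent) {
  return { vnode, parent } // body reduced to a stub for this sketch
}

console.log(createComponentInstanceForVnode({ tag: 'div' }, null))
```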
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/4fyrj95L/](https://jsfiddle.net/4fyrj95L/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nset model binded to select element and the array that iterated the option list at the same time\r\n\r\nmake sure the new model value do not match any of the new options\r\n\r\nthe model will be set to `undefined`\r\n\r\n### What is expected?\r\n\r\nmodel value to be set to `1`\r\n\r\n### What is actually happening?\r\nmodel's value set to `undefined`\r\n\r\n---\r\nThis bug only appears when model and the options changed at the same and the new model value does not match any option, other situations behaves correctly as far as I tested.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "as a workaround you can set a `key` on the `select` element that changes alongside the selected list", "created_at": "2019-10-10T17:38:33Z" }, { "body": "It has nothing to do with the key, add key or keys do not resolve the problem.\r\nActurally the bug is triggered by an internal logic.\r\nSee the pr I created above.\r\n@posva\r\n", "created_at": "2019-10-11T01:04:09Z" }, { "body": "A workaround is only a temporary fix for the problem for you or anybody to have a solution until a fix is released: The original problem has nothing to deal with keys yet, adding a `key` **does remove the bug**", "created_at": "2019-10-11T07:28:03Z" }, { "body": "@posva Maybe im reading the description incorrectly but this sounds like expected behavior. If i change the list of available options, and then change the value to something not in the list of available options, it _should_ be `undefined`, right?", "created_at": "2020-01-14T15:05:07Z" }, { "body": "It should keep the set value instead of changing it to `undefined`", "created_at": "2020-01-14T17:06:02Z" }, { "body": "This issue is owned by the logic of the code:\r\nhttps://github.com/vuejs/vue/pull/10615/files\r\nthe `$selectedValue` variable in line 121 may be an empty array, but the subsequent code read its first element, if the code is in c/c++, that would be an error or index out of bound.\r\nsetting it to `undefined` is just an accident.", "created_at": "2020-01-20T01:07:33Z" } ], "number": 10614, "title": "when <select> model and the option list changed at the same time, model may incorrectly set to `undefined`" }
{ "body": "fix(select-model): fix the issue of select's v-model set to `undefined` in some corner case\r\nfix #10614\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 10615, "review_comments": [ { "body": "$$selectedVal may be an empty array, this way we should not return its first element\r\nreserve the model's previous value is a reasonable choice", "created_at": "2019-10-10T17:09:59Z" } ], "title": "fix #10614: the select's v-model set to `undefined` in some corner case" }
{ "commits": [ { "message": "fix(select-model): fix the issue of select's v-model set to `undefined` in some corner case\n\nfix #10614" } ], "files": [ { "diff": "@@ -118,7 +118,7 @@ function genSelect (\n `.map(function(o){var val = \"_value\" in o ? o._value : o.value;` +\n `return ${number ? '_n(val)' : 'val'}})`\n \n- const assignment = '$event.target.multiple ? $$selectedVal : $$selectedVal[0]'\n+ const assignment = `$event.target.multiple ? $$selectedVal : ($$selectedVal.length > 0 ? $$selectedVal[0] : ${value})`\n let code = `var $$selectedVal = ${selectedVal};`\n code = `${code} ${genAssignmentCode(value, assignment)}`\n addHandler(el, 'change', code, null, true)", "filename": "src/platforms/web/compiler/directives/model.js", "status": "modified" }, { "diff": "@@ -232,6 +232,25 @@ describe('Directive v-model select', () => {\n }).then(done)\n })\n \n+ it('should not set model to `undefined` when both model and options changed', (done) => {\n+ const vm = new Vue({\n+ data: {\n+ test: 'a',\n+ opts: ['a', 'b', 'c']\n+ },\n+ template:\n+ '<select v-model=\"test\">' +\n+ '<option v-for=\"o in opts\" :value=\"o\">option {{ o }}</option>' +\n+ '</select>'\n+ }).$mount()\n+ document.body.appendChild(vm.$el)\n+ vm.test = '1'\n+ vm.opts = ['2', '3', '4']\n+ waitForUpdate(() => {\n+ expect(vm.test).toBe('1') // should not set vm.test to `undefined` but reserves '1'\n+ }).then(done)\n+ })\n+\n if (!hasMultiSelectBug()) {\n it('multiple', done => {\n const vm = new Vue({", "filename": "test/unit/features/directives/model-select.spec.js", "status": "modified" } ] }
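Stripped of codegen, the behaviour change in the generated `change` handler amounts to falling back to the previous model value when no option matches, instead of reading index 0 of an empty array. A sketch of that decision (the function name and signature are illustrative, not Vue internals):

```js
// selectedVal: values of the currently selected <option>s (may be empty right
// after the option list was swapped); previous: the current v-model value.
function nextSelectModelValue(selectedVal, multiple, previous) {
  if (multiple) return selectedVal
  // old behaviour was `selectedVal[0]`, which yields undefined when nothing matches
  return selectedVal.length > 0 ? selectedVal[0] : previous
}

console.log(nextSelectModelValue(['b'], false, 'a'))     // 'b'  -- a real selection wins
console.log(nextSelectModelValue([], false, '1'))        // '1'  -- model kept instead of undefined
console.log(nextSelectModelValue(['2', '3'], true, []))  // ['2', '3'] -- multiple select unchanged
```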
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://github.com/oguimbal/vuebug](https://github.com/oguimbal/vuebug)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n```\r\ngit clone git@github.com:oguimbal/vuebug.git\r\nnpm i\r\nnpm start\r\n```\r\n\r\nWait a couple of seconds, and your compilation process will be frozen.\r\n\r\nIf you attach a debugger to the node process, you will see the infinite loop in `generateCodeFrame()` method of vue-template-compiler:\r\n\r\n![bug](https://raw.githubusercontent.com/oguimbal/vuebug/master/bug.png)\r\n\r\n### What is expected?\r\nI would expect the compiler not to freeze\r\n\r\n### What is actually happening?\r\nThe compiler is freezing\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Hey, in order to check we need a boiled down repro without any extra dependency (like storybook). Ping me when you get a boiled down repro so I can get a look", "created_at": "2019-09-22T22:32:51Z" }, { "body": "@posva No problem... \r\n\r\nhttps://github.com/oguimbal/vuebug-simple\r\n\r\n```\r\ngit clone git@github.com:oguimbal/vuebug-simple.git\r\ncd vuebug-simple\r\nnpm i\r\ncode .\r\n```\r\n\r\nThen hit F5 (if using vscode, otherwise `npm start`) => freeze.\r\n\r\nnb: I know that i forgot to include pug loader ... but thats the point, it freezes without error.\r\n\r\n[edit] NB: The tight loop is in [generateCodeFrame()](https://github.com/vuejs/vue/blob/77796596adc48d050beefd11e827e8e4d44c6b3c/src/compiler/codeframe.js), where [line 16-17](https://github.com/vuejs/vue/blob/77796596adc48d050beefd11e827e8e4d44c6b3c/src/compiler/codeframe.js#L17) seems fishy to me", "created_at": "2019-09-22T23:15:42Z" }, { "body": "It shows an error instead of freezing:\r\n\r\n```\r\nERROR in ./bug.vue?vue&type=template&id=20e40a56&lang=pug& (./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib??vue-loader-options!./bug.vue?vue&type=template&id=20e40a56&lang=pug&)\r\nModule Error (from ./node_modules/vue-loader/lib/loaders/templateLoader.js):\r\n(Emitted value instead of an instance of Error)\r\n\r\n Errors compiling template:\r\n\r\n Component template requires a root element, rather than just text.\r\n\r\n 1 |\r\n |\r\n 2 | div Whatever\r\n | ^^^^^^^^^^^^\r\n 3 |\r\n\r\n @ ./bug.vue?vue&type=template&id=20e40a56&lang=pug& 1:0-202 1:0-202\r\n @ ./bug.vue\r\n @ ./main.js\r\n```\r\n\r\nThe error comes from vue-loader though. 
It could maybe display a warning if the specified lang does not contain a rule in the webpack config but at the same time the need of using pre processors is explained on the very first paragraph", "created_at": "2019-09-23T08:37:13Z" }, { "body": "Not on my machine oO' (see screenshot as \"proof\")\r\n\r\nHowever, if you look at [this](https://github.com/vuejs/vue/blob/77796596adc48d050beefd11e827e8e4d44c6b3c/src/compiler/codeframe.js#L17) , it is kind of obvious that this loop can never finish in some circumstances.\r\n\r\nFor me, with my repo, it gets called that way:\r\n\r\n```\r\ngenerateCodeFrame(`\r\ndiv Whatever\r\n`, 2, 16)\r\n```\r\n\r\nWhich freezes: [https://codesandbox.io/embed/xenodochial-roentgen-5dbbq](https://codesandbox.io/embed/xenodochial-roentgen-5dbbq)\r\n\r\n![image](https://user-images.githubusercontent.com/8973947/65412381-6866ca80-ddef-11e9-87d7-679ea8505db0.png)\r\n", "created_at": "2019-09-23T09:00:01Z" }, { "body": "@posva I know know if you dont believe me or if you dont have the time to look at it, but I think it is probably due to the fact that I'm on **Windows** and you might not: My line breaks are `\\r\\n` instead of `\\n` ... \r\n\r\nThus generateCodeFrame() gets called with \"16\" as length in the example above (which should be 14 without `\\r`'s)\r\n\r\nThe split `const lines = source.split(/\\r?\\n/);` removes those two caracters.\r\n\r\nBut the line `count += lineLength + 1;` does not take them into account (only adds 1 line return character), and there is no check that gets execution out of this infinite loop.\r\n\r\nAnyway I wont bother you further with that, the problem is solved for me anyway :)\r\n", "created_at": "2019-09-24T08:06:05Z" }, { "body": "I see, you should use LF ending. It can be enforced through eslint via `linebreak-style`.\r\nLet me check a bit more about this", "created_at": "2019-09-24T08:16:41Z" }, { "body": "Even when using CRLF endings in the bug.vue file (on osx), I get the same error as before, no infinite loop 🤔 ", "created_at": "2019-09-24T08:49:35Z" }, { "body": "I just reproduced this issue… The repository URL in OP is different from the one in the screenshot. The correct one is https://github.com/oguimbal/vuebug-simple\r\nAfter cloning this repo & change the line endings I can reproduce the frozen output bug.", "created_at": "2019-09-24T09:06:51Z" }, { "body": "So… I believe this is a bug. PR's welcome.", "created_at": "2019-09-24T11:22:02Z" }, { "body": "Well I have found the culprit 😂\r\nhttps://github.com/vuejs/vue/blob/d2db6af1a55fdb4d65746fb67f7bfbced7d916f0/src/compiler/codeframe.js#L8\r\n\r\nThis line.\r\nThere're many warnings that do not provide the `end` position.\r\nFor those warnings, `end` is calculated by `source.length`. However, in the following for-loop, `count` is incremented by `lineLength + 1`, which is incorrect if the line ends with CRLF. 
Thus the infinite loop.\r\n\r\nAlso, this only happens when the template does not have an indentation because otherwise Vue would have [normalized the template source during de-indentation](https://github.com/vuejs/vue/blob/d2db6af1a55fdb4d65746fb67f7bfbced7d916f0/src/sfc/parser.js#L101)", "created_at": "2019-09-24T11:47:36Z" }, { "body": "The fix is as simple as\r\n```\r\nend: number = source.replace(/\\r\\n/g, '\\n').length\r\n```", "created_at": "2019-09-24T11:48:53Z" }, { "body": "Created [a pull request](https://github.com/vuejs/vue/pull/10553) that should fix it.\r\n\r\nJust a suggestion that is a bit more agnostic about wether if this funciton input has CR in source.\r\n( @sodatea fix would work, but only if caller does not provide the `end` argument).\r\n\r\nPlus it breaks out of the loop if `j >= lines.length` ... if i'm correct, there is no good reason to stick in the loop in this case :)", "created_at": "2019-09-24T13:49:45Z" }, { "body": "哈哈", "created_at": "2019-10-24T06:12:19Z" }, { "body": "xx", "created_at": "2019-11-18T09:58:40Z" }, { "body": "Any updates on this ?", "created_at": "2021-08-30T01:44:20Z" } ], "number": 10547, "title": "Infinite loop in vue-template-compiler" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n\r\nI'm having timeouts when trying to run some e2e tests ... but they seem unrelated to this fix (they happen without this fix)\r\n\r\nCloses #10547", "number": 10553, "review_comments": [], "title": "fix(compiler): infinite loop in generateCodeFrame (#10547)" }
{ "commits": [ { "message": "fix(compiler): infinite loop in generateCodeFrame (#10547)" }, { "message": "fix(compiler): infinite loop in generateCodeFrame (#10547)" } ], "files": [ { "diff": "@@ -5,18 +5,23 @@ const range = 2\n export function generateCodeFrame (\n source: string,\n start: number = 0,\n- end: number = source.length\n+ end?: number\n ): string {\n- const lines = source.split(/\\r?\\n/)\n+ const lines = source.split(/\\n/)\n+ .map(x => x[x.length - 1] === '\\r' ? { line: x.substr(0, x.length - 1), increment: 2 } : { line: x, increment: 1})\n+ if (typeof end === 'undefined') {\n+ end = lines.reduce((s, x) => s + x.line.length + x.increment, 0)\n+ }\n let count = 0\n const res = []\n for (let i = 0; i < lines.length; i++) {\n- count += lines[i].length + 1\n+ count += lines[i].line.length + lines[i].increment\n if (count >= start) {\n for (let j = i - range; j <= i + range || end > count; j++) {\n- if (j < 0 || j >= lines.length) continue\n- res.push(`${j + 1}${repeat(` `, 3 - String(j + 1).length)}| ${lines[j]}`)\n- const lineLength = lines[j].length\n+ if (j < 0) continue\n+ if (j >= lines.length) break\n+ res.push(`${j + 1}${repeat(` `, 3 - String(j + 1).length)}| ${lines[j].line}`)\n+ const lineLength = lines[j].line.length\n if (j === i) {\n // push underline\n const pad = start - (count - lineLength) + 1\n@@ -27,7 +32,7 @@ export function generateCodeFrame (\n const length = Math.min(end - count, lineLength)\n res.push(` | ` + repeat(`^`, length))\n }\n- count += lineLength + 1\n+ count += lineLength + lines[j].increment\n }\n }\n break", "filename": "src/compiler/codeframe.js", "status": "modified" } ] }
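The arithmetic behind the hang: for CRLF sources, the `end` offset is measured on the raw source (carriage returns included), but the old loop advances `count` by `lineLength + 1` over lines whose `\r` was stripped by the split, so `count` can never reach `end`; once `j` runs past the last line, `continue` skips the code that grows `count` and the `end > count` guard holds forever. A small standalone check of that mismatch, using the template from the report:

```js
const source = '\r\ndiv Whatever\r\n' // CRLF-terminated template from bug.vue

const lines = source.split(/\r?\n/)   // [ '', 'div Whatever', '' ]

// The most the old loop can ever account for: each line plus ONE break
// character, i.e. as if the source had used LF endings.
const countable = lines.reduce((sum, line) => sum + line.length + 1, 0)

console.log(source.length) // 16 -> what `end` reflects (the \r chars are counted)
console.log(countable)     // 15 -> the ceiling `count` can reach
// With end = 16, `end > count` stays true after the last line is passed,
// which is the infinite loop seen in generateCodeFrame.
```

Both proposed fixes close the gap from opposite sides: normalising the source to LF before measuring, or counting two characters per CRLF line while iterating.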
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n\r\n[https://jsfiddle.net/zs5dan7x/](https://jsfiddle.net/zs5dan7x/)\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n- pass a Symbol as the value of a component prop which expects another type\r\n\r\n### What is expected?\r\nthe console should show the validation error message\r\n\r\n### What is actually happening?\r\nthe prop validator tries to build an validation error message, it fails with :\r\n\r\n TypeError: Cannot convert a Symbol value to a string\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Marked as a bug because it crashes the application", "created_at": "2019-09-17T09:13:28Z" }, { "body": "Symbol('bad').toString()", "created_at": "2019-09-17T09:42:06Z" } ], "number": 10519, "title": "prop validator fails to generate validation error message when using Symbols" }
{ "body": "Fixes #10519\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 10529, "review_comments": [], "title": "fix(props): correctly warn when a provided prop is Symbol" }
{ "commits": [ { "message": "fix(props): correctly warn when a provided prop is Symbol\n\nFixes #10519" }, { "message": "style: space before parens" } ], "files": [ { "diff": "@@ -205,18 +205,19 @@ function getInvalidTypeMessage (name, value, expectedTypes) {\n ` Expected ${expectedTypes.map(capitalize).join(', ')}`\n const expectedType = expectedTypes[0]\n const receivedType = toRawType(value)\n- const expectedValue = styleValue(value, expectedType)\n- const receivedValue = styleValue(value, receivedType)\n // check if we need to specify expected value\n- if (expectedTypes.length === 1 &&\n- isExplicable(expectedType) &&\n- !isBoolean(expectedType, receivedType)) {\n- message += ` with value ${expectedValue}`\n+ if (\n+ expectedTypes.length === 1 &&\n+ isExplicable(expectedType) &&\n+ isExplicable(typeof value) &&\n+ !isBoolean(expectedType, receivedType)\n+ ) {\n+ message += ` with value ${styleValue(value, expectedType)}`\n }\n message += `, got ${receivedType} `\n // check if we need to specify received value\n if (isExplicable(receivedType)) {\n- message += `with value ${receivedValue}.`\n+ message += `with value ${styleValue(value, receivedType)}.`\n }\n return message\n }\n@@ -231,9 +232,9 @@ function styleValue (value, type) {\n }\n }\n \n+const EXPLICABLE_TYPES = ['string', 'number', 'boolean']\n function isExplicable (value) {\n- const explicitTypes = ['string', 'number', 'boolean']\n- return explicitTypes.some(elem => value.toLowerCase() === elem)\n+ return EXPLICABLE_TYPES.some(elem => value.toLowerCase() === elem)\n }\n \n function isBoolean (...args) {", "filename": "src/core/util/props.js", "status": "modified" }, { "diff": "@@ -241,6 +241,16 @@ describe('Options props', () => {\n makeInstance({}, Symbol)\n expect('Expected Symbol, got Object').toHaveBeenWarned()\n })\n+\n+ it('warns when expected an explicable type but Symbol was provided', () => {\n+ makeInstance(Symbol('foo'), String)\n+ expect('Expected String, got Symbol').toHaveBeenWarned()\n+ })\n+\n+ it('warns when expected an explicable type but Symbol was provided', () => {\n+ makeInstance(Symbol('foo'), [String, Number])\n+ expect('Expected String, Number, got Symbol').toHaveBeenWarned()\n+ })\n }\n \n it('custom constructor', () => {", "filename": "test/unit/features/options/props.spec.js", "status": "modified" } ] }
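The underlying crash is that implicit string conversion of a Symbol throws, which the old message builder triggered by interpolating the raw prop value; the fix only interpolates values whose `typeof` is one of the explicable types. A standalone demonstration:

```js
const value = Symbol('bad')

// Implicit conversion (what a template literal does) throws:
try {
  const msg = `got value ${value}`
} catch (e) {
  console.log(e instanceof TypeError, e.message) // true 'Cannot convert a Symbol value to a string'
}

// Explicit conversion would be fine, but the fix simply skips the value part
// of the warning for non-explicable types instead:
console.log(String(value))    // 'Symbol(bad)'
console.log(value.toString()) // 'Symbol(bad)'

// The added guard, reduced to its essence:
const EXPLICABLE_TYPES = ['string', 'number', 'boolean']
const isExplicable = t => EXPLICABLE_TYPES.includes(t.toLowerCase())
console.log(isExplicable(typeof value)) // false -> no "with value ..." segment in the warning
```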
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n\r\n- Functional components: [https://github.com/sin1ght/test](https://github.com/sin1ght/test)\r\n- Regular components and slots: https://codesandbox.io/embed/vue-template-3pnsx\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nAfter npm run serve, click the toggle button and find that child has no style.\r\n\r\nChild and child2 components are reused, child's data-v-* disappears, causing the style to disappear\r\n\r\n### What is expected?\r\n\r\nChild should have a black background scope style\r\n\r\n### What is actually happening?\r\n\r\nChild without style\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "not only functional components,use functional components just to make sameVnode true,a functional component and a normal dom is ok as well", "created_at": "2019-08-20T21:53:27Z" }, { "body": "i add two lines code in patchVnode,and it work will at my demo,but i'm not sure\r\n\r\n`oldVnode.fnScopeId = vnode.fnScopeId;\r\n setScope(oldVnode);`\r\n\r\n![mt44X9.png](https://s2.ax1x.com/2019/08/21/mt44X9.png)", "created_at": "2019-08-21T03:10:42Z" }, { "body": "I tested with the 4 possible combinations among regular and functional components and the problem only appears when both are functional\r\n\r\nIf you are up to give the fix a try, you can. If other things break, you should see tests failing", "created_at": "2019-08-21T10:13:05Z" }, { "body": "if samaVnode true then patchVnode\r\n\r\n![mUrTBT.png](https://s2.ax1x.com/2019/08/21/mUrTBT.png)\r\n\r\nregular component's tag will changed,but functional component not\r\n\r\n![mUrxjx.png](https://s2.ax1x.com/2019/08/21/mUrxjx.png)\r\n\r\nso,a normal dom is ok as well\r\n\r\n```javascript\r\n//parent.vue\r\n\r\n<template>\r\n <div class=\"parent\">\r\n <Child class=\"child\" v-if=\"!test\"/>\r\n <div class=\"child2\" v-if=\"test\">\r\n </div>\r\n</template>\r\n```\r\n\r\n", "created_at": "2019-08-21T11:00:50Z" }, { "body": "similar bug, affected pure normal components with slot, please see my simple demo.\r\n\r\nhttps://codesandbox.io/s/vue-template-jimot", "created_at": "2019-08-25T15:45:12Z" }, { "body": "@CzBiX \r\nI think your problem is same with me.\r\ntwo child reused but scopedId not be copied .\r\n![mRH28I.png](https://s2.ax1x.com/2019/08/26/mRH28I.png)", "created_at": "2019-08-26T02:45:12Z" }, { "body": "@sin1ght It's weird I couldn't see the problem in your repro using regular components but others' repro do show so I adapted the title. Sorry about that!", "created_at": "2019-08-29T11:51:57Z" }, { "body": "Hi, I'm interested interested in this particular task, how to I go ahead", "created_at": "2022-12-17T05:35:13Z" }, { "body": "@posva I submitted a pull request for this issue #12938 . But I noticed the requirements state that I should merge to main instead of dev, even though the Contributing Guide says I shouldn't.\r\n\r\nShould I make one for main instead?", "created_at": "2023-01-18T21:58:51Z" } ], "number": 10416, "title": "Scoped CSS attribute is reused or discarded when switching between components with scoped CSS" }
{ "body": "fix #10416\r\n", "number": 10454, "review_comments": [], "title": "fix: update copy fnScopeId in patchVnode (fix: #10416)" }
{ "commits": [ { "message": "fix: update copy fnScopeId in patchVnode\n\nfix #10416" } ], "files": [ { "diff": "@@ -547,6 +547,10 @@ export function createPatchFunction (backend) {\n \n const oldCh = oldVnode.children\n const ch = vnode.children\n+\n+ oldVnode.fnScopeId = vnode.fnScopeId; \n+ setScope(oldVnode);\n+\n if (isDef(data) && isPatchable(vnode)) {\n for (i = 0; i < cbs.update.length; ++i) cbs.update[i](oldVnode, vnode)\n if (isDef(i = data.hook) && isDef(i = i.update)) i(oldVnode, vnode)", "filename": "src/core/vdom/patch.js", "status": "modified" } ] }
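The reported symptom is that when two vnodes are similar enough for `sameVnode` to return true, the reused DOM element keeps the old functional component's `data-v-*` attribute, so the new component's scoped CSS no longer matches. The proposed patch copies the new vnode's `fnScopeId` onto the old vnode and re-runs `setScope`. A rough, framework-free sketch of that idea (the vnode shape and the `setScope` behaviour here are simplified assumptions, not Vue internals):

```js
// Minimal stand-ins: a "vnode" with a functional scope id and a rendered element.
function setScope(vnode) {
  if (vnode.fnScopeId) {
    vnode.elm.attrs[`data-v-${vnode.fnScopeId}`] = '' // scoped-CSS marker attribute
  }
}

function patchScopedAttr(oldVnode, vnode) {
  // Reuse the already-rendered element...
  vnode.elm = oldVnode.elm
  // ...but make sure it also carries the NEW component's scope id,
  // otherwise the new component's scoped styles cannot apply.
  oldVnode.fnScopeId = vnode.fnScopeId
  setScope(oldVnode)
}

const oldVnode = { fnScopeId: 'child', elm: { attrs: { 'data-v-child': '' } } }
const newVnode = { fnScopeId: 'child2' }

patchScopedAttr(oldVnode, newVnode)
console.log(newVnode.elm.attrs) // { 'data-v-child': '', 'data-v-child2': '' }
```

Note that, like the patch itself, this sketch only adds the new scope attribute; the stale one from the previous component is left in place.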
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/vue-template-j1w3r](https://codesandbox.io/s/vue-template-j1w3r)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nIn my example change the value by typing in the input field.\r\nIn the working example both values change.\r\nIn the not working example only the nested value changes.\r\nThe only difference is the wrapping `<template>` node\r\n\r\n### What is expected?\r\nscoped slot params (controllerSlotData) should be reactive in both cases\r\n\r\n### What is actually happening?\r\nscoped slot params (controllerSlotData) is not reative\r\n\r\n---\r\nThe problem exists only when combining v-slot on component itself in combination with v-if/v-else\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I don't know why I didn't see this issue before opening #11553... Anyway, do you know when #10377 will be merged?", "created_at": "2020-07-31T06:40:09Z" } ], "number": 10330, "title": "nested v-slot is not reactive when using abbreviated syntax (v-slot on component itself) combined with an v-if/v-else" }
{ "body": "fix nested v-slot not updating when v-if/v-else used in the root element of the slot content, and it will close #10330 .\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 10377, "review_comments": [], "title": "fix(compiler): fix nested v-slot not updating when used with v-if/v-else" }
{ "commits": [ { "message": "fix(compiler): fix nested v-slot not updating when v-if/v-else used in\nthe root element of the slot content\n\nclose #10330" } ], "files": [ { "diff": "@@ -698,6 +698,13 @@ function processSlotContent (el) {\n slotContainer.children = el.children.filter((c: any) => {\n if (!c.slotScope) {\n c.parent = slotContainer\n+ if (c.ifConditions) {\n+ // #10330\n+ // update every element's parent when it is in the other condition branch\n+ // so we can find correctly whether the element is inside another scoped slot when\n+ // generating scoped slot's rendering code, and this will trigger force updating\n+ c.ifConditions.forEach(({block}) => block.parent = slotContainer)\n+ }\n return true\n }\n })", "filename": "src/compiler/parser/index.js", "status": "modified" }, { "diff": "@@ -1325,4 +1325,39 @@ describe('Component scoped slot', () => {\n expect(vm.$el.textContent).toMatch(`1`)\n }).then(done)\n })\n+\n+ // #10330\n+ it('nested v-slot should be reactive when v-slot on component itself combined with v-if/v-else', done => {\n+ const Container = {\n+ template: `<div><slot v-bind=\"n\" /></div>`,\n+ props: ['n']\n+ }\n+\n+ const Nested = {\n+ template: `<div><slot v-bind=\"m\" /></div>`,\n+ props: ['m']\n+ }\n+\n+ const vm = new Vue({\n+ data: {\n+ n: { value: 0 }, \n+ disabled: false\n+ },\n+ components: { Container, Nested },\n+ template: `\n+ <container v-slot=\"n\" :n=\"n\">\n+ <div v-if=\"disabled\">Disabled</div>\n+ <nested v-else v-slot=\"m\" :m=\"n\">\n+ {{n.value}} {{m.value}}\n+ </nested>\n+ </container>\n+ `\n+ }).$mount()\n+\n+ expect(vm.$el.textContent).toMatch(`0 0`)\n+ vm.n.value++\n+ waitForUpdate(() => {\n+ expect(vm.$el.textContent).toMatch(`1 1`)\n+ }).then(done)\n+ })\n })", "filename": "test/unit/features/component/component-scoped-slot.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/zrh122/scg6eq1t/](https://jsfiddle.net/zrh122/scg6eq1t/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n1. open the reproduction link\r\n2. look at the rendered result\r\n\r\n### What is expected?\r\n```js\r\na: a\r\nb: b\r\n```\r\n\r\n### What is actually happening?\r\n```js\r\na:\r\nb: b\r\n```\r\n\r\n---\r\nThis is an english version of issue #10165.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [], "number": 10271, "title": "dynamic v-slot maybe overwite when used together with v-for on same element" }
{ "body": "fixes #10271\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**", "number": 10272, "review_comments": [], "title": "fix(compiler): avoid overwriting when dynamic v-slot used with v-for" }
{ "commits": [ { "message": "fix(compiler): avoid overwriting when dynamic v-slot used with v-for\n\nfixes #10271" } ], "files": [ { "diff": "@@ -131,7 +131,8 @@ declare type ASTElement = {\n slotTarget?: ?string;\n slotTargetDynamic?: boolean;\n slotScope?: ?string;\n- scopedSlots?: { [name: string]: ASTElement };\n+ scopedSlots?: Array<ASTElement>;\n+ scopedSlotsMap?: { [name: string]: number };\n \n ref?: string;\n refInFor?: boolean;", "filename": "flow/compiler.js", "status": "modified" }, { "diff": "@@ -359,22 +359,21 @@ function genInlineTemplate (el: ASTElement, state: CodegenState): ?string {\n \n function genScopedSlots (\n el: ASTElement,\n- slots: { [key: string]: ASTElement },\n+ slots: Array<ASTElement>,\n state: CodegenState\n ): string {\n // by default scoped slots are considered \"stable\", this allows child\n // components with only scoped slots to skip forced updates from parent.\n // but in some cases we have to bail-out of this optimization\n // for example if the slot contains dynamic names, has v-if or v-for on them...\n- let needsForceUpdate = el.for || Object.keys(slots).some(key => {\n- const slot = slots[key]\n- return (\n+ let needsForceUpdate = el.for || slots.some(slot =>\n+ (\n slot.slotTargetDynamic ||\n slot.if ||\n slot.for ||\n containsSlotChild(slot) // is passing down slot from parent which may be dynamic\n )\n- })\n+ )\n \n // #9534: if a component with scoped slots is inside a conditional branch,\n // it's possible for the same component to be reused but with different\n@@ -404,8 +403,8 @@ function genScopedSlots (\n }\n }\n \n- const generatedSlots = Object.keys(slots)\n- .map(key => genScopedSlot(slots[key], state))\n+ const generatedSlots = slots\n+ .map(slot => genScopedSlot(slot, state))\n .join(',')\n \n return `scopedSlots:_u([${generatedSlots}]${", "filename": "src/compiler/codegen/index.js", "status": "modified" }, { "diff": "@@ -229,3 +229,27 @@ function rangeSetItem (\n }\n return item\n }\n+\n+export function addScopedSlot (\n+ target: ASTElement,\n+ name: string,\n+ slot: ASTElement,\n+ append?: boolean\n+) {\n+ const scopedSlots = target.scopedSlots || (target.scopedSlots = [])\n+ if (append) {\n+ // don't check the name of the scoped slot\n+ // for example, dynamic v-slot and v-for are used on the same element\n+ scopedSlots.push(slot)\n+ } else {\n+ // $flow-disable-line\n+ const scopedSlotsMap = target.scopedSlotsMap || (target.scopedSlotsMap = Object.create(null))\n+ const i = scopedSlotsMap[name]\n+ if (i >= 0) {\n+ // overwrite it if already has same name scoped slot\n+ scopedSlots[i] = slot\n+ } else {\n+ scopedSlotsMap[name] = scopedSlots.push(slot) - 1\n+ }\n+ }\n+}", "filename": "src/compiler/helpers.js", "status": "modified" }, { "diff": "@@ -14,6 +14,7 @@ import {\n baseWarn,\n addHandler,\n addDirective,\n+ addScopedSlot,\n getBindingAttr,\n getAndRemoveAttr,\n getRawBindingAttr,\n@@ -143,8 +144,18 @@ export function parse (\n // scoped slot\n // keep it in the children list so that v-else(-if) conditions can\n // find it as the prev node.\n- const name = element.slotTarget || '\"default\"'\n- ;(currentParent.scopedSlots || (currentParent.scopedSlots = {}))[name] = element\n+ if (\n+ element.slotTargetDynamic &&\n+ element.for &&\n+ element.slotTarget\n+ ) {\n+ // #10271\n+ // dynamic v-slot and v-for are used on the same element\n+ // TODO: checking if dynamic slot target actually use scope variables in v-for\n+ addScopedSlot(currentParent, element.slotTarget, element, true)\n+ } else {\n+ addScopedSlot(currentParent, 
element.slotTarget || '\"default\"', element)\n+ }\n }\n currentParent.children.push(element)\n element.parent = currentParent\n@@ -690,9 +701,9 @@ function processSlotContent (el) {\n }\n }\n // add the component's children to its default slot\n- const slots = el.scopedSlots || (el.scopedSlots = {})\n const { name, dynamic } = getSlotName(slotBinding)\n- const slotContainer = slots[name] = createASTElement('template', [], el)\n+ const slotContainer = createASTElement('template', [], el)\n+ addScopedSlot(el, name, slotContainer)\n slotContainer.slotTarget = name\n slotContainer.slotTargetDynamic = dynamic\n slotContainer.children = el.children.filter((c: any) => {", "filename": "src/compiler/parser/index.js", "status": "modified" }, { "diff": "@@ -1325,4 +1325,32 @@ describe('Component scoped slot', () => {\n expect(vm.$el.textContent).toMatch(`1`)\n }).then(done)\n })\n+\n+ // #10271\n+ it('should work when dynamic slot name used in v-for', () => {\n+ const Foo = {\n+ template: `\n+ <div>\n+ <slot name=\"a\" />\n+ <slot name=\"b\" />\n+ <slot name=\"c\" />\n+ </div>\n+ `\n+ }\n+ const vm = new Vue({\n+ data: {\n+ item: 'c'\n+ },\n+ template: `\n+ <foo>\n+ <template v-slot:[item] v-for=\"item in ['a']\">A {{ item }}</template>\n+ <template v-slot:[item] v-for=\"item in ['b']\">B {{ item }}</template>\n+ <template v-slot:[item]>C {{ item }}</template>\n+ </foo>\n+ `,\n+ components: { Foo }\n+ }).$mount()\n+\n+ expect(vm.$el.textContent.trim()).toBe('A a B b C c')\n+ })\n })", "filename": "test/unit/features/component/component-scoped-slot.spec.js", "status": "modified" } ] }
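A sketch of the colliding-slot-name pattern from #10271, mirroring the test added in the PR. It assumes the global Vue 2 full build and a hypothetical `#app` element.

```js
// Dynamic slot names driven by v-for on the same <template> (#10271).
// Before the patch all three templates compiled into the same scopedSlots
// key and overwrote each other; with the array-based addScopedSlot helper
// the expected output is "A a B b C c".
const Foo = {
  template: `
    <div>
      <slot name="a" />
      <slot name="b" />
      <slot name="c" />
    </div>
  `
}

new Vue({
  el: '#app', // hypothetical mount point
  components: { Foo },
  data: { item: 'c' },
  template: `
    <foo>
      <template v-slot:[item] v-for="item in ['a']">A {{ item }}</template>
      <template v-slot:[item] v-for="item in ['b']">B {{ item }}</template>
      <template v-slot:[item]>C {{ item }}</template>
    </foo>
  `
})
```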
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/mybeta/1dfwsazn/](https://jsfiddle.net/mybeta/1dfwsazn/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nWhen I use DOM Template Parsing Caveats to add a component (e.g. `<tr is=\"custom-row\">` ) I can't use scoped slots (slotProps) without receiving a warning in the console. I would like to use it this way:\r\n```html\r\n<tr is=\"custom-row\" v-slot=\"slotProps\">\r\n slotProps: {{slotProps.test}}\r\n</tr>\r\n```\r\n\r\n### What is expected?\r\nNo warning.\r\n\r\n### What is actually happening?\r\nI get the following warning in the console. \r\n>```\r\n>[Vue warn]: Error compiling template:\r\n>v-slot can only be used on components or <template>.\r\n>```\r\n\r\nThe rest looks like it works as expected.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "As the warning says `v-slot` can only be used on the wrapper components or nested in the component itself but on a `template` tag.\r\nPreviously, `slot-scope` would allow this usage but it was removed, you can check out the RFC: https://github.com/vuejs/rfcs/blob/master/active-rfcs/0001-new-slot-syntax.md.\r\n\r\nIf you only have one scoped slot, you should be able to do \r\n\r\n```html\r\n<tr :is=\"customComp\" v-slot=\"data\"></tr>\r\n```", "created_at": "2019-06-13T16:46:00Z" }, { "body": "Hi @posva , thanks a lot for your comment! \r\n\r\nAs suggested, I have also tried to use `v-slot` nested inside a `template` tag, something like the below. \r\n```html\r\n<div is=\"test-component\">\r\n <template v-slot:default=\"slotProps\">\r\n slotProps: {{slotProps.test}}\r\n </template>\r\n</div>\r\n```\r\nHowever that produces a warning as well, see fiddle: \r\nhttps://jsfiddle.net/mybeta/bhcoLwu3/\r\n\r\nWhen avoiding the DOM template parsing caveats, it works just fine: \r\n```html\r\n<test-component>\r\n <template v-slot:default=\"slotProps\">\r\n slotProps: {{slotProps.test}}\r\n </template>\r\n</test-component>\r\n```\r\nbut I believe I need to use the caveat, when using the component on a `tr` tag. \r\n\r\nAlso, would you mind explaining this comment a bit more? \r\n> If you only have one scoped slot, you should be able to do `\r\n\r\nThanks!\r\n", "created_at": "2019-06-15T06:40:25Z" }, { "body": "I updated my comment but the warning seems to appear anyway, the warning shouldn't appear indeed", "created_at": "2019-06-18T23:38:06Z" }, { "body": "What is the status of this?", "created_at": "2020-11-04T18:51:08Z" } ], "number": 10152, "title": "Scoped Slot warns when used inside of dynamic component on regular element" }
{ "body": "Add condition to see whether the element may be an component.\r\nfixes #10152\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 10167, "review_comments": [], "title": "fix(warns): modify `maybeComponent` function in parser" }
{ "commits": [ { "message": "fix(compiler): add condition to see whether the element may be an component\n\nfixes #10152" } ], "files": [ { "diff": "@@ -86,8 +86,12 @@ export function parse (\n platformMustUseProp = options.mustUseProp || no\n platformGetTagNamespace = options.getTagNamespace || no\n const isReservedTag = options.isReservedTag || no\n- maybeComponent = (el: ASTElement) => !!el.component || !isReservedTag(el.tag)\n-\n+ maybeComponent = (el: ASTElement) => !!(\n+ el.component ||\n+ el.attrsMap[':is'] ||\n+ el.attrsMap['v-bind:is'] ||\n+ !(el.attrsMap.is ? isReservedTag(el.attrsMap.is) : isReservedTag(el.tag))\n+ )\n transforms = pluckModuleFunction(options.modules, 'transformNode')\n preTransforms = pluckModuleFunction(options.modules, 'preTransformNode')\n postTransforms = pluckModuleFunction(options.modules, 'postTransformNode')", "filename": "src/compiler/parser/index.js", "status": "modified" }, { "diff": "@@ -881,4 +881,20 @@ describe('parser', () => {\n expect(ast.children[2].type).toBe(3)\n expect(ast.children[2].text).toBe('\\ndef')\n })\n+\n+ // #10152\n+ it('not warn when scoped slot used inside of dynamic component on regular element', () => {\n+ parse(`\n+ <div>\n+ <div is=\"customComp\" v-slot=\"slotProps\"></div>\n+ <div :is=\"'customComp'\" v-slot=\"slotProps\"></div>\n+ <div v-bind:is=\"'customComp'\" v-slot=\"slotProps\"></div>\n+ </div>\n+ `, baseOptions)\n+ expect('v-slot can only be used on components or <template>').not.toHaveBeenWarned()\n+\n+ parse(`<div is=\"customComp\"><template v-slot=\"slotProps\"></template></div>`, baseOptions)\n+ expect(`<template v-slot> can only appear at the root level inside the receiving the component`)\n+ .not.toHaveBeenWarned()\n+ })\n })", "filename": "test/unit/modules/compiler/parser.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/fp1omdaw/1/](https://jsfiddle.net/fp1omdaw/1/)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nIn a v-pre tag or child, use an attribute listed under the `isBooleanAttr` list in `src\\platforms\\web\\util\\attrs.js`\r\n\r\n### What is expected?\r\nThe attribute value is unchanged\r\n\r\n### What is actually happening?\r\nThe attribute value is changed, e.g. `open` has the value to set to `open`\r\n\r\n---\r\nWhen working with MathML, we need to use the `open` attribute\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [], "number": 10087, "title": "Attributes listed under isBooleanAttr are altered under v-pre attributed tags" }
{ "body": "Fix #10087\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nWhen attributes under a v-pre match the list in `isBooleanAttr`, the value is replaced. This goes against the purpose of v-pre which is to leave all elements as-is.\r\n\r\nAn example of this bug can be found at https://jsfiddle.net/fp1omdaw/1/ as well as further details under #10087", "number": 10088, "review_comments": [ { "body": "Isn't this an old option that no longer exist? I think we can remove it", "created_at": "2019-07-24T07:23:23Z" }, { "body": "I don't think the test requires a component usage here, we could use a raw p or div", "created_at": "2019-07-24T07:23:58Z" }, { "body": "wouldn't checking `isInPre` first be more optimal?", "created_at": "2020-09-21T15:15:56Z" }, { "body": "```suggestion\r\n if (isInPre || el.tagName.indexOf('-') > -1) {\r\n```", "created_at": "2020-09-21T15:29:46Z" } ], "title": "fix(v-pre): do not alter attributes (fix #10087)" }
{ "commits": [ { "message": "fix(v-pre): do not alter attributes\n\nclose #10087" }, { "message": "fix(v-pre): do not alter attributes\n\nremove component and replace option from unit test" }, { "message": "refactor: use or" }, { "message": "perf: check boolean before index" } ], "files": [ { "diff": "@@ -39,7 +39,7 @@ function updateAttrs (oldVnode: VNodeWithData, vnode: VNodeWithData) {\n cur = attrs[key]\n old = oldAttrs[key]\n if (old !== cur) {\n- setAttr(elm, key, cur)\n+ setAttr(elm, key, cur, vnode.data.pre)\n }\n }\n // #4391: in IE9, setting type can reset value for input[type=radio]\n@@ -59,8 +59,8 @@ function updateAttrs (oldVnode: VNodeWithData, vnode: VNodeWithData) {\n }\n }\n \n-function setAttr (el: Element, key: string, value: any) {\n- if (el.tagName.indexOf('-') > -1) {\n+function setAttr (el: Element, key: string, value: any, isInPre: any) {\n+ if (isInPre || el.tagName.indexOf('-') > -1) {\n baseSetAttr(el, key, value)\n } else if (isBooleanAttr(key)) {\n // set attribute for blank value", "filename": "src/platforms/web/runtime/modules/attrs.js", "status": "modified" }, { "diff": "@@ -42,4 +42,13 @@ describe('Directive v-pre', function () {\n vm.$mount()\n expect(vm.$el.firstChild.tagName).toBe('VTEST')\n })\n+\n+ // #10087\n+ it('should not compile attributes', function () {\n+ const vm = new Vue({\n+ template: '<div v-pre><p open=\"hello\">A Test</p></div>'\n+ })\n+ vm.$mount()\n+ expect(vm.$el.firstChild.getAttribute('open')).toBe('hello')\n+ })\n })", "filename": "test/unit/features/directives/pre.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction link\r\n[https://codepen.io/action-hong/pen/eaKmOy](https://codepen.io/action-hong/pen/eaKmOy)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n1. click \"show/hide button\" to hide button \"add 1\"\r\n2. click \"show/hide button\" to show button \"add 1\"\r\n3. click \"add 1\"\r\n\r\n### What is expected?\r\nRepeat step 1 and step 2, Click the \"add 1\" to add 1 to the number\r\n\r\n### What is actually happening?\r\nRepeat step 1 and step 2 , Click the \"add 1\" to add n(repeat times) to the number\r\n\r\n---\r\n5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Seems like a bug indeed. As a workaround, you can pass a regular `@click` and handle it manually with `$listeners.click`\r\n\r\n```vue\r\n<button @click=\"$listeners.click\">{{ text }}</button>\r\n```", "created_at": "2019-05-29T08:47:18Z" }, { "body": "What?", "created_at": "2019-06-20T01:39:46Z" } ], "number": 10083, "title": "bug about keep-alive and @click.native caused repeated attach event handler?" }
{ "body": "Add removing all dom event listeners when vnode destroyed\r\nfixes #10083\r\nfixes #10004 \r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 10085, "review_comments": [], "title": "fix(v-on): add removing all dom event listeners when vnode destroyed" }
{ "commits": [ { "message": "fix(v-on): add removing all dom event listeners when vnode destroyed" } ], "files": [ { "diff": "@@ -5,6 +5,7 @@ import { updateListeners } from 'core/vdom/helpers/index'\n import { isIE, isFF, supportsPassive, isUsingMicroTask } from 'core/util/index'\n import { RANGE_TOKEN, CHECKBOX_RADIO_TOKEN } from 'web/compiler/directives/model'\n import { currentFlushTimestamp } from 'core/observer/scheduler'\n+import { emptyNode } from 'core/vdom/patch'\n \n // normalize v-model event tokens that can only be determined at runtime.\n // it's important to place the event as the first in the array because\n@@ -108,13 +109,16 @@ function updateDOMListeners (oldVnode: VNodeWithData, vnode: VNodeWithData) {\n }\n const on = vnode.data.on || {}\n const oldOn = oldVnode.data.on || {}\n- target = vnode.elm\n+ // vnode is empty when removing all listeners,\n+ // and use old vnode dom element\n+ target = vnode.elm || oldVnode.elm\n normalizeEvents(on)\n updateListeners(on, oldOn, add, remove, createOnceHandler, vnode.context)\n target = undefined\n }\n \n export default {\n create: updateDOMListeners,\n- update: updateDOMListeners\n+ update: updateDOMListeners,\n+ destroy: (vnode: VNodeWithData) => updateDOMListeners(vnode, emptyNode)\n }", "filename": "src/platforms/web/runtime/modules/events.js", "status": "modified" }, { "diff": "@@ -1182,5 +1182,37 @@ describe('Component keep-alive', () => {\n }).then(done)\n }\n })\n+\n+ // #10083\n+ it('should not attach event handler repeatedly', done => {\n+ const vm = new Vue({\n+ template: `\n+ <keep-alive>\n+ <btn v-if=\"showBtn\" @click.native=\"add\" />\n+ </keep-alive>\n+ `,\n+ data: { showBtn: true, n: 0 },\n+ methods: {\n+ add () {\n+ this.n++\n+ }\n+ },\n+ components: {\n+ btn: { template: '<button>add 1</button>' }\n+ }\n+ }).$mount()\n+\n+ const btn = vm.$el\n+ expect(vm.n).toBe(0)\n+ btn.click()\n+ expect(vm.n).toBe(1)\n+ vm.showBtn = false\n+ waitForUpdate(() => {\n+ vm.showBtn = true\n+ }).then(() => {\n+ btn.click()\n+ expect(vm.n).toBe(2)\n+ }).then(done)\n+ })\n }\n })", "filename": "test/unit/features/component/component-keep-alive.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.10\r\n\r\n### Reproduction\r\n\r\n```html\r\n<!DOCTYPE html>\r\n<html lang=\"en\">\r\n <head>\r\n <meta charset=\"UTF-8\" />\r\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\r\n <meta http-equiv=\"X-UA-Compatible\" content=\"ie=edge\" />\r\n <title>Document</title>\r\n </head>\r\n <body>\r\n <script src=\"https://unpkg.com/vue\"></script>\r\n\r\n <div id=\"app\">\r\n <div id=\"nav\">\r\n <button @click=\"goHome\">go to Home</button>\r\n <button @click=\"goAbout\">go to About</button>\r\n </div>\r\n <component :is=\"current\"></component>\r\n </div>\r\n\r\n <script>\r\n const Home = {\r\n name: 'Home',\r\n template: `\r\n <div>\r\n <h2>Home</h2>\r\n </div>\r\n `,\r\n }\r\n\r\n const About = {\r\n template: `\r\n <div class=\"about\">\r\n <h1>This is an about page</h1>\r\n <input type=\"text\" v-model=\"input\">\r\n </div>\r\n `,\r\n name: 'about',\r\n data: () => ({\r\n input: '',\r\n }),\r\n }\r\n\r\n const vm = new Vue({\r\n el: '#app',\r\n data() {\r\n return {\r\n current: 'Home',\r\n }\r\n },\r\n\r\n methods: {\r\n goHome() {\r\n this.current = 'Home'\r\n },\r\n goAbout() {\r\n this.current = 'About'\r\n },\r\n },\r\n components: { Home, About },\r\n })\r\n </script>\r\n </body>\r\n</html>\r\n```\r\n\r\n\r\n![Screen Shot 2019-05-09 at 18 59 07](https://user-images.githubusercontent.com/664177/57471906-8a224380-728c-11e9-832d-f1ce989ee1e0.png)\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n\r\n- go to the about page\r\n- type in the input\r\n- leave the page\r\n- collect garbage and take a snapshot with devtools\r\n\r\n### What is expected?\r\n\r\nVueComponent count should be stable\r\n\r\n### What is actually happening?\r\n\r\nVueComponent count keeps increasing.\r\n\r\n---\r\n\r\nseems to be related to typing in the input\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "@posva After further testing. This might not be Vue's fault.\r\n\r\nTesting the code above on Chrome Version 72.0.3626.121 (mac) seems to run without any issues and VueComponent count is stable.", "created_at": "2019-05-09T22:37:30Z" }, { "body": "it isn't in 74. It could be a bug on Chrome. I've found leaks in the past (https://bugs.chromium.org/p/chromium/issues/detail?id=949587). I didn't test with this one though as it looks like Vue is retaining the component and the browser cannot free it", "created_at": "2019-05-10T06:44:47Z" }, { "body": "I can confirm this happens in Chrome 74. When trying to replicate this in Safari, I cannot even see any Vue related objects in their JS allocations inspector, but that is probably just my mistake somehow.\r\n\r\nHowever, if this doesn't happen in the earlier versions of Chrome with the same Vue version, this could be probably forwarded to Chromium project and closed here?", "created_at": "2019-05-10T08:21:23Z" }, { "body": "This is especially critical when we talk about large SPA's with tones of data. As far as I can see Chrome 74 is not able to collect much...", "created_at": "2019-05-10T09:09:22Z" }, { "body": "https://bugs.chromium.org/p/chromium/issues/detail?id=961494\r\n\r\nThere is some development regarding this issue.", "created_at": "2019-05-14T21:55:25Z" }, { "body": "Recently I have found that there is a problem while input element work with 'v-model'. If I type the content quickly, it cause the high CPU usage. 
\r\n![3E5A8D54478379E333140DDA975D8E0F](https://user-images.githubusercontent.com/11409080/58165130-ad4fe880-7cb9-11e9-89c4-16f4803307dd.jpg)\r\n", "created_at": "2019-05-22T09:47:43Z" }, { "body": "@posva or anyone knows how to test this with FF devtools and EDGE devtools?\r\nBecause we don't have the constructors names in those.\r\nThis is happening in other browsers like chrome and ff too.\r\nThanks.", "created_at": "2019-05-23T18:12:52Z" }, { "body": "@posva, i just found out PR #10085 can fix it, my Chrome Version is 74.0.3729.169.", "created_at": "2019-05-30T08:12:02Z" }, { "body": "Thanks for checking it out!", "created_at": "2019-05-30T09:00:23Z" }, { "body": "Does anybody know if this also happens in the PROD version? As I am not able to filter by \"vue\" components in Chrome DevTools. I see the same code as the dev version though...", "created_at": "2019-07-01T10:21:51Z" }, { "body": "> Does anybody know if this also happens in the PROD version? As I am not able to filter by \"vue\" components in Chrome DevTools. I see the same code as the dev version though...\r\n\r\n@clopezcapo Yes it does.", "created_at": "2019-07-01T10:28:57Z" }, { "body": "Bufff, and isn't that a huge issue for PROD environments? It looks like is a serious potential killer to me.\r\n\r\nDoes anybody know if they are aware?", "created_at": "2019-07-01T10:35:32Z" }, { "body": "@posva can you please update us on the status of this fix? As still happens in PROD.", "created_at": "2019-07-01T11:17:05Z" }, { "body": "this issue is too serious, it take me a long time to find it out...", "created_at": "2019-07-02T01:52:17Z" }, { "body": "So... any workarounds so far? Downgrade to v2.6.9?", "created_at": "2019-08-01T09:27:09Z" }, { "body": "I can confirm that PR #10085 fixes it for us as well. Is there a plan to merge this?", "created_at": "2020-02-14T10:54:08Z" }, { "body": "I also tried this solution, but only half of it worked. chrome80/vue2.6.11/vue-router 3.0.\r\nWhile vue component has been successfully recycled, three additional listeners have not been removed and dom cannot be recycled. Looking at the heap snapshot,the VueComponent increment is 0, but htmlInputElement has not been recycled, as Performance tools proves. I tracked that these 3 listeners comes from \"vue/src/platforms/web/runtime/directs/model.js\"-->function onCompositionStart and onCompositionEnd, But I'm not familiar with vue source code .\r\nwhen input element is deep in a large component, this leads to a large memory leak.", "created_at": "2020-03-03T08:37:13Z" }, { "body": "Excuse me?When can I wait until this problem is corrected?There is a serious memory leak in my project, and is in urgent need of a new version!", "created_at": "2020-09-03T08:45:34Z" }, { "body": "As a workaround, I wrap my inputs to:\r\n\r\n- manually add / remove event listeners\r\n- 'detach' the input from component DOM on beforeDestroy\r\n\r\nThis way 'only' the inputs (which have been edited) itself leak and not the whole component / view.\r\n\r\nSimplified:\r\n\r\n```\r\n<template>\r\n\r\n <div id=\"nativeInputContainer\">\r\n <input\r\n ref=\"nativeInput\"\r\n id=\"nativeInput\"\r\n v-bind:type=\"(password) ? 
'password' : ''\"\r\n v-bind:placeholder=\"placeholder\"\r\n v-bind:value=\"workingValue\"\r\n />\r\n </div>\r\n\r\n</template>\r\n\r\n<script>\r\n\r\nexport default {\r\n name: 'BaseInput',\r\n inheritAttrs: false,\r\n components: {},\r\n props: {\r\n placeholder: {\r\n type: String,\r\n required: false,\r\n default: () => { return '' }\r\n },\r\n value: {\r\n type: String,\r\n required: true\r\n },\r\n password: {\r\n type: Boolean,\r\n required: false,\r\n default: () => { return false }\r\n }\r\n },\r\n data () {\r\n return {\r\n workingValue: ''\r\n }\r\n },\r\n watch: {\r\n\r\n value: {\r\n immediate: true,\r\n handler () {\r\n this.workingValue = this.value\r\n }\r\n }\r\n\r\n },\r\n methods: {\r\n\r\n onInput (event) {\r\n this.workingValue = event.target.value\r\n this.$emit('input', this.workingValue)\r\n },\r\n\r\n onChange (event) {\r\n this.$emit('change', this.workingValue)\r\n }\r\n\r\n },\r\n mounted () {\r\n // to avoid memory leak (from vue)\r\n // we assign event listeners manually\r\n this.$nextTick(() => {\r\n this.$refs.nativeInput.addEventListener('input', this.onInput)\r\n this.$refs.nativeInput.addEventListener('change', this.onChange)\r\n })\r\n },\r\n beforeDestroy () {\r\n // to avoid memory leak (from vue)\r\n // we remove event listeners manually\r\n this.$refs.nativeInput.removeEventListener('input', this.onInput)\r\n this.$refs.nativeInput.removeEventListener('change', this.onChange)\r\n\r\n // to avoid memory leak (from input itself)\r\n // which causes 'this' to be not available for gc\r\n // (undo history keeps input alive)\r\n // see: https://bugs.chromium.org/p/chromium/issues/detail?id=961494#c14\r\n this.$el\r\n .querySelector('#nativeInputContainer')\r\n .removeChild(\r\n this.$el.querySelector('#nativeInput')\r\n )\r\n // maybe not necessary but \"always Double Tap\"\r\n this.$refs.nativeInput = null\r\n }\r\n\r\n}\r\n</script>\r\n\r\n<style scoped>\r\n</style>\r\n```\r\n\r\n", "created_at": "2020-11-16T12:55:38Z" }, { "body": "Thanks for using Chrome! And sorry for bothering you... m(_ _)m\r\n\r\nChrome Canary 93.0.4535.2 (at leas) clears undo stack for removed <input>, <textarea> content editable. So, undo stack is no more source of memory leak. (http://crbug.com/961494)\r\n\r\nI attempt to take memory snapshot for posva's sample. Retainers of \"Detached HTMLInputElemnt\" are:\r\n\r\n1. ShadowRoot\r\n2. Detached HTMLInputElement\r\n3. Detached HTMLDivElement\r\n4. Detached Text\r\n5. **elm in VNode from vue:767**\r\n6. Blink roots\r\n7. Blink roots\r\n\r\nNote: You can make `ShadowRoot` to detached by `<input>.value = ''` (remove `Text` node) and `<input>.type ='hidden'` (remove `ShadowRoot`)\r\n\r\nI'm not sure Blink roots of 6 and 7. One of Blink roots may be caused by `<input>.focus()`.\r\n\r\nAnyway, Detached HTMLInputElement is only one, so memory issue is mitigated.\r\n\r\n\r\n\r\n", "created_at": "2021-06-09T05:14:05Z" } ], "number": 10004, "title": "Memory leak with component with input with v-model" }
{ "body": "Add removing all dom event listeners when vnode destroyed\r\nfixes #10083\r\nfixes #10004 \r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [ ] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 10085, "review_comments": [], "title": "fix(v-on): add removing all dom event listeners when vnode destroyed" }
{ "commits": [ { "message": "fix(v-on): add removing all dom event listeners when vnode destroyed" } ], "files": [ { "diff": "@@ -5,6 +5,7 @@ import { updateListeners } from 'core/vdom/helpers/index'\n import { isIE, isFF, supportsPassive, isUsingMicroTask } from 'core/util/index'\n import { RANGE_TOKEN, CHECKBOX_RADIO_TOKEN } from 'web/compiler/directives/model'\n import { currentFlushTimestamp } from 'core/observer/scheduler'\n+import { emptyNode } from 'core/vdom/patch'\n \n // normalize v-model event tokens that can only be determined at runtime.\n // it's important to place the event as the first in the array because\n@@ -108,13 +109,16 @@ function updateDOMListeners (oldVnode: VNodeWithData, vnode: VNodeWithData) {\n }\n const on = vnode.data.on || {}\n const oldOn = oldVnode.data.on || {}\n- target = vnode.elm\n+ // vnode is empty when removing all listeners,\n+ // and use old vnode dom element\n+ target = vnode.elm || oldVnode.elm\n normalizeEvents(on)\n updateListeners(on, oldOn, add, remove, createOnceHandler, vnode.context)\n target = undefined\n }\n \n export default {\n create: updateDOMListeners,\n- update: updateDOMListeners\n+ update: updateDOMListeners,\n+ destroy: (vnode: VNodeWithData) => updateDOMListeners(vnode, emptyNode)\n }", "filename": "src/platforms/web/runtime/modules/events.js", "status": "modified" }, { "diff": "@@ -1182,5 +1182,37 @@ describe('Component keep-alive', () => {\n }).then(done)\n }\n })\n+\n+ // #10083\n+ it('should not attach event handler repeatedly', done => {\n+ const vm = new Vue({\n+ template: `\n+ <keep-alive>\n+ <btn v-if=\"showBtn\" @click.native=\"add\" />\n+ </keep-alive>\n+ `,\n+ data: { showBtn: true, n: 0 },\n+ methods: {\n+ add () {\n+ this.n++\n+ }\n+ },\n+ components: {\n+ btn: { template: '<button>add 1</button>' }\n+ }\n+ }).$mount()\n+\n+ const btn = vm.$el\n+ expect(vm.n).toBe(0)\n+ btn.click()\n+ expect(vm.n).toBe(1)\n+ vm.showBtn = false\n+ waitForUpdate(() => {\n+ vm.showBtn = true\n+ }).then(() => {\n+ btn.click()\n+ expect(vm.n).toBe(2)\n+ }).then(done)\n+ })\n }\n })", "filename": "test/unit/features/component/component-keep-alive.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.6.8\r\n\r\n### Reproduction link\r\nhttps://jsfiddle.net/shai/nujtes67/15/\r\n\r\n### Steps to reproduce\r\nPress the `Change screen` button to toggle between screen `1` and screen `2`.\r\n\r\n### What is expected?\r\nNothing should fade, and nothing should be logged to console, because the `v-if` within the `<transition>` is always `true`.\r\n\r\n### What is actually happening?\r\nOn each press B fades in again and `enter: B (custom component)` gets logged to the console.\r\n\r\n---\r\nI've included A as well as B, to show that the bug only occurs when using a custom component with a `<transition>` and `<slot>`. A uses the `<transition>` directly, and doesn't exhibit the buggy behavior.\r\n\r\nNB I've also noticed that if the custom component is switched to being functional instead, then it begins working correctly the same as A.\r\n\r\nSo it appears to only be reproducible when using a custom component and `<slot>`, even though this is what the docs recommend: https://vuejs.org/v2/guide/transitions.html#Reusable-Transitions\r\n\r\n**Or, it's possible that I've misunderstood the expected behavior of a `<transition>` inside of a keyed or otherwise newly appearing component. In which case there's still a bug**, because in that case A and B should BOTH be fading and logging to console (though that seems weird to me: the `<transition>` isn't `appear=true`, so the fact that its parent is newly appearing shouldn't affect it…)\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Seems to be unrelated to slots as it also happens with\r\n\r\n```js\r\nVue.component('my-transition', {\r\n\ttemplate: `<transition name=\"fade\" @enter=\"enter\"> <p>heheeh</p></transition>`,\r\n methods: {\r\n \tenter() {\r\n \tconsole.log('enter: A (regular)');\r\n },\r\n }\r\n});\r\n```", "created_at": "2019-03-05T13:17:38Z" }, { "body": "Thanks. Updated repro: https://jsfiddle.net/shai/nujtes67/18/", "created_at": "2019-03-05T13:45:40Z" }, { "body": "I tested some behaviors of custom `my-transition` and built-in `transition`, but I'm not sure which ones are expected, like below:\r\n\r\n- [self has v-if](https://jsfiddle.net/df8czhkj/),results:\r\n> - transition without fade-in has fade-out\r\n> - my-transition has fade-in and fade-out\r\n\r\n- [parent has v-if](https://jsfiddle.net/9fbrsnq0/),results: \r\n> - transition without fade-in and fade-out\r\n> - my-transition has fade-in without fade-out\r\n\r\nIf you change v-if to v-show, the behavior will be different, I think this is also a bug. But now, I hope that people who use vue will tell me what is the correct behavior in the above situation?\r\n\r\nAfter clear purpose, if I have the ability to correct it correctly, I will continue to modify the code.\r\n", "created_at": "2019-03-08T13:10:06Z" } ], "number": 9628, "title": "Transition component animates when wrapper is replaced" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nfixed #9628 \r\nTransition inside child component with v-if should be no enter-transition, it depend on appear attribute.", "number": 9668, "review_comments": [], "title": "fix(transition): looking up `context` of transition" }
{ "commits": [ { "message": "fix(#9628): fix looking up `context`" }, { "message": "test: update test case for transition" }, { "message": "test: add test case for transition" }, { "message": "test: add test case for transition" }, { "message": "docs: update modal example" } ], "files": [ { "diff": "@@ -10,7 +10,7 @@\n <body>\n <!-- template for the modal component -->\n <script type=\"text/x-template\" id=\"modal-template\">\n- <transition name=\"modal\">\n+ <transition name=\"modal\" appear>\n <div class=\"modal-mask\">\n <div class=\"modal-wrapper\">\n <div class=\"modal-container\">", "filename": "examples/modal/index.html", "status": "modified" }, { "diff": "@@ -66,8 +66,8 @@ export function enter (vnode: VNodeWithData, toggleDisplay: ?() => void) {\n let context = activeInstance\n let transitionNode = activeInstance.$vnode\n while (transitionNode && transitionNode.parent) {\n- transitionNode = transitionNode.parent\n context = transitionNode.context\n+ transitionNode = transitionNode.parent\n }\n \n const isAppear = !context._isMounted || !vnode.isRootInsert", "filename": "src/platforms/web/runtime/modules/transition.js", "status": "modified" }, { "diff": "@@ -842,7 +842,7 @@ if (!isIE9) {\n }).then(done)\n })\n \n- it('transition inside child component', done => {\n+ it('transition inside child component with v-if', done => {\n const vm = new Vue({\n template: `\n <div>\n@@ -872,14 +872,126 @@ if (!isIE9) {\n expect(vm.$el.children.length).toBe(0)\n vm.ok = true\n }).then(() => {\n- expect(vm.$el.children[0].className).toBe('test v-enter v-enter-active')\n+ expect(vm.$el.children[0].className).toBe('test')\n+ }).then(done)\n+ })\n+\n+ it('transition with appear inside child component with v-if', done => {\n+ const vm = new Vue({\n+ template: `\n+ <div>\n+ <test v-if=\"ok\" class=\"test\"></test>\n+ </div>\n+ `,\n+ data: { ok: true },\n+ components: {\n+ test: {\n+ template: `\n+ <transition appear\n+ appear-class=\"test-appear\"\n+ appear-to-class=\"test-appear-to\"\n+ appear-active-class=\"test-appear-active\">\n+ <div>foo</div>\n+ </transition>\n+ `\n+ }\n+ }\n+ }).$mount(el)\n+\n+ waitForUpdate(() => {\n+ expect(vm.$el.children[0].className).toBe('test test-appear test-appear-active')\n }).thenWaitFor(nextFrame).then(() => {\n- expect(vm.$el.children[0].className).toBe('test v-enter-active v-enter-to')\n+ expect(vm.$el.children[0].className).toBe('test test-appear-active test-appear-to')\n+ }).thenWaitFor(duration + buffer).then(() => {\n+ expect(vm.$el.children[0].className).toBe('test')\n+ vm.ok = false\n+ }).then(() => {\n+ expect(vm.$el.children[0].className).toBe('test v-leave v-leave-active')\n+ }).thenWaitFor(nextFrame).then(() => {\n+ expect(vm.$el.children[0].className).toBe('test v-leave-active v-leave-to')\n+ }).thenWaitFor(duration + buffer).then(() => {\n+ expect(vm.$el.children.length).toBe(0)\n+ }).then(done)\n+ })\n+\n+ it('transition inside nested child component with v-if', done => {\n+ const vm = new Vue({\n+ template: `\n+ <div>\n+ <foo v-if=\"ok\" class=\"test\"></foo>\n+ </div>\n+ `,\n+ data: { ok: true },\n+ components: {\n+ foo: {\n+ template: '<bar></bar>',\n+ components: {\n+ bar: {\n+ template: '<transition><div>foo</div></transition>'\n+ }\n+ }\n+ }\n+ }\n+ }).$mount(el)\n+\n+ // should not apply transition on initial render by default\n+ expect(vm.$el.innerHTML).toBe('<div class=\"test\">foo</div>')\n+ vm.ok = false\n+ waitForUpdate(() => {\n+ expect(vm.$el.children[0].className).toBe('test v-leave v-leave-active')\n+ }).thenWaitFor(nextFrame).then(() 
=> {\n+ expect(vm.$el.children[0].className).toBe('test v-leave-active v-leave-to')\n }).thenWaitFor(duration + buffer).then(() => {\n+ expect(vm.$el.children.length).toBe(0)\n+ vm.ok = true\n+ }).then(() => {\n expect(vm.$el.children[0].className).toBe('test')\n }).then(done)\n })\n \n+ it('transition with appear inside nested child component with v-if', done => {\n+ const vm = new Vue({\n+ template: `\n+ <div>\n+ <foo v-if=\"ok\" class=\"test\"></foo>\n+ </div>\n+ `,\n+ data: { ok: true },\n+ components: {\n+ foo: {\n+ template: '<bar></bar>',\n+ components: {\n+ bar: {\n+ template: `\n+ <transition appear\n+ appear-class=\"test-appear\"\n+ appear-to-class=\"test-appear-to\"\n+ appear-active-class=\"test-appear-active\">\n+ <div>foo</div>\n+ </transition>\n+ `\n+ }\n+ }\n+ }\n+ }\n+ }).$mount(el)\n+\n+ waitForUpdate(() => {\n+ expect(vm.$el.children[0].className).toBe('test test-appear test-appear-active')\n+ }).thenWaitFor(nextFrame).then(() => {\n+ expect(vm.$el.children[0].className).toBe('test test-appear-active test-appear-to')\n+ }).thenWaitFor(duration + buffer).then(() => {\n+ expect(vm.$el.children[0].className).toBe('test')\n+ vm.ok = false\n+ }).then(() => {\n+ expect(vm.$el.children[0].className).toBe('test v-leave v-leave-active')\n+ }).thenWaitFor(nextFrame).then(() => {\n+ expect(vm.$el.children[0].className).toBe('test v-leave-active v-leave-to')\n+ }).thenWaitFor(duration + buffer).then(() => {\n+ expect(vm.$el.children.length).toBe(0)\n+ }).then(done)\n+ })\n+\n it('custom transition higher-order component', done => {\n const vm = new Vue({\n template: '<div><my-transition><div v-if=\"ok\" class=\"test\">foo</div></my-transition></div>',", "filename": "test/unit/features/transition/transition.spec.js", "status": "modified" } ] }
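A sketch of the reusable-transition setup from #9628, close to the CodePen in the issue; it assumes the global Vue 2 full build, a hypothetical `#app` element, and some `.fade-*` CSS classes if a visible transition is wanted.

```js
// Reusable wrapper without `appear` (#9628): swapping the keyed parent should
// not re-run the enter transition (or the @enter hook) for the slot content.
Vue.component('my-transition', {
  template: `
    <transition name="fade" @enter="onEnter">
      <slot />
    </transition>
  `,
  methods: {
    // before the context-lookup fix this logged on every screen switch
    onEnter () { console.log('enter hook fired') }
  }
})

new Vue({
  el: '#app',
  data: { screen: 1 },
  template: `
    <div>
      <button @click="screen = screen === 1 ? 2 : 1">Change screen</button>
      <div :key="screen">
        <my-transition>
          <p v-if="true">initial</p>
        </my-transition>
      </div>
    </div>
  `
})
```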
{ "body": "### Version\r\n2.6.6\r\n\r\n### Reproduction link\r\n[https://codepen.io/anon/pen/qgQYra?editors=1011](https://codepen.io/anon/pen/qgQYra?editors=1011)\r\n\r\n### Steps to reproduce\r\nThe problem seems to happen when I handle an event emitted from a component and return a rejected promise. To see the problem in action, open the codepen link, click the button and you will see in the console two logs of the same error, which means the Vue.config.errorHandler was called twice.\r\n\r\n### What is expected?\r\nI expect the errrorHandler called once.\r\n\r\n### What is actually happening?\r\nthe errorHandler function is called twice.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "@jfgodoy try using the async keyword with the promise, and lets see if it works for you. \r\nI have another alternative if it doesn't work though", "created_at": "2019-02-19T18:26:17Z" }, { "body": "hi @marcus-hiles, with async I have the same problem. Only a synchronous throw works", "created_at": "2019-02-19T20:52:04Z" } ], "number": 9511, "title": "Vue.config.errorHandler called twice when I return a rejected Promise from a handler of a component event" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nfixed #9511 \r\n\r\nIn some cases, invokeWithErrorHandling will have nested calls, and Promise will independently catch multiple times, resulting in multiple calls to handleError.\r\n\r\nLook at a sample code:\r\n\r\n``` javascript\r\nvar p = Promise.reject('some err');\r\n\r\np.catch(() => console.log('1')); // p = p.catch(...)\r\np.catch(() => console.log('2'));\r\n```\r\nThe above code will output 1 and 2, but if you reassign the first catch call to p, it will not output 2.", "number": 9526, "review_comments": [], "title": "fix(#9511): avoid promise catch multiple times" }
{ "commits": [ { "message": "fix(#9511): avoid promise catch multiple times" }, { "message": "fix(#9511): add a test case for util/error/invokeWithErrorHandling" }, { "message": "fix(#9511): update test case for util/error/invokeWithErrorHandling" } ], "files": [ { "diff": "@@ -36,7 +36,9 @@ export function invokeWithErrorHandling (\n try {\n res = args ? handler.apply(context, args) : handler.call(context)\n if (res && !res._isVue && isPromise(res)) {\n- res.catch(e => handleError(e, vm, info + ` (Promise/async)`))\n+ // issue #9511\n+ // reassign to res to avoid catch triggering multiple times when nested calls\n+ res = res.catch(e => handleError(e, vm, info + ` (Promise/async)`))\n }\n } catch (e) {\n handleError(e, vm, info)", "filename": "src/core/util/error.js", "status": "modified" }, { "diff": "@@ -0,0 +1,23 @@\n+import Vue from 'vue'\n+import { invokeWithErrorHandling } from 'core/util/error'\n+\n+describe('invokeWithErrorHandling', () => {\n+ if (typeof Promise !== 'undefined') {\n+ it('should errorHandler call once when nested calls return rejected promise', done => {\n+ let times = 0\n+\n+ Vue.config.errorHandler = function () {\n+ times++\n+ }\n+\n+ invokeWithErrorHandling(() => {\n+ return invokeWithErrorHandling(() => {\n+ return Promise.reject(new Error('fake error'))\n+ })\n+ }).then(() => {\n+ expect(times).toBe(1)\n+ done()\n+ })\n+ })\n+ }\n+})", "filename": "test/unit/modules/util/invoke-with-error-handling.spec.js", "status": "added" } ] }
{ "body": "### Version\r\n2.5.22\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/rm36pyr37q](https://codesandbox.io/s/rm36pyr37q)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nGiven a very simple component:\r\n\r\n```\r\n<template>\r\n <div v-bind:style=\"styleObject\">This div should have a large z-index</div>\r\n</template>\r\n\r\n<script>\r\nexport default {\r\n name: \"HelloWorld\",\r\n data() {\r\n return {\r\n styleObject: {\r\n maxWidth: \"400px\",\r\n minWidth: \"auto\",\r\n opacity: 1,\r\n padding: \"5px\",\r\n position: \"fixed\",\r\n right: \"10px\",\r\n top: \"10px\",\r\n zIndex: \"100 !important\" // this doesn't work\r\n // \"z-index\": \"100 !important\" // this works\r\n }\r\n };\r\n }\r\n};\r\n</script>\r\n```\r\n\r\n`zIndex` is not bound to style\r\n\r\n### What is expected?\r\nBinding `zIndex` does not get kebab-cased like `maxWidth` and `minWidth`. I expected it to work the same way. Passing `z-index` works as expected.\r\n\r\n### What is actually happening?\r\nThe `zIndex` variable is ignored entirely.\r\n\r\n---\r\nI was creating an alert-style box that needs a high z-index to appear \"on top\" of other elements on the page.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "FYI this is not specific to `zIndex`. It's caused by camelCase names not converted to kebab-case when the value contains `!important`.", "created_at": "2019-01-28T17:03:22Z" } ], "number": 9382, "title": "vbind:style does not work with zIndex" }
{ "body": "**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nThe [documentation](https://developer.mozilla.org/en-US/docs/Web/API/CSSStyleDeclaration/setProperty#Parameters) states that the first parameter for `setProperty` must be hyphenated.\r\n\r\ncloses #9382", "number": 9386, "review_comments": [ { "body": "We only need hyphenated name here. Other modification can be discarded.\r\n\r\n```suggestion\r\n el.style.setProperty(hyphenate(name), val.replace(importantRE, ''), 'important')\r\n```", "created_at": "2019-01-29T16:32:39Z" }, { "body": "Thanks for the review, this was also my first thought but considering that setProperty requires a hyphenated parameter I don't see a problem of letting this change happen in all invocations. Also, in line 16 the normalize function will convert it to camelCase every time so I don't see a problem there too. If we ultimately want to keep the scope of the change smaller as possible I agree with your proposal but if not I think is better for consistency to not have different versions of the same information spread over the function.", "created_at": "2019-01-29T17:49:52Z" }, { "body": "It's mostly performance. If we can avoid the hyphenate call in some branches then we should.", "created_at": "2019-01-29T21:10:04Z" }, { "body": "Makes sense 😃! I have ammended the proposed fix into my last commit, thanks @yyx990803 and @Justineo!", "created_at": "2019-01-29T22:29:23Z" } ], "title": "style does not work with zIndex (fix #9382)" }
{ "commits": [ { "message": "fix(style): Fix property name case for setProperty\n\nFrom the documentation (MDN) style.setProperty must have its property name as hyphen case" } ], "files": [ { "diff": "@@ -1,7 +1,7 @@\n /* @flow */\n \n import { getStyle, normalizeStyleBinding } from 'web/util/style'\n-import { cached, camelize, extend, isDef, isUndef } from 'shared/util'\n+import { cached, camelize, extend, isDef, isUndef, hyphenate } from 'shared/util'\n \n const cssVarRE = /^--/\n const importantRE = /\\s*!important$/\n@@ -10,7 +10,7 @@ const setProp = (el, name, val) => {\n if (cssVarRE.test(name)) {\n el.style.setProperty(name, val)\n } else if (importantRE.test(val)) {\n- el.style.setProperty(name, val.replace(importantRE, ''), 'important')\n+ el.style.setProperty(hyphenate(name), val.replace(importantRE, ''), 'important')\n } else {\n const normalizedName = normalize(name)\n if (Array.isArray(val)) {", "filename": "src/platforms/web/runtime/modules/style.js", "status": "modified" }, { "diff": "@@ -101,6 +101,13 @@ describe('Directive v-bind:style', () => {\n }).then(done)\n })\n \n+ it('camelCase with !important', done => {\n+ vm.styles = { zIndex: '100 !important' }\n+ waitForUpdate(() => {\n+ expect(vm.$el.style.getPropertyPriority('z-index')).toBe('important')\n+ }).then(done)\n+ })\n+\n it('object with multiple entries', done => {\n vm.$el.style.color = 'red'\n vm.styles = {", "filename": "test/unit/features/directives/style.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.22\r\n\r\n### Reproduction link\r\n[https://codepen.io/anon/pen/jXdLGV](https://codepen.io/anon/pen/jXdLGV)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nOpen the repro and note that the rendered page says \"initial appear\".\r\n\r\n### What is expected?\r\nThe `@appear` hook should not be invoked and the page should just say \"initial\".\r\n\r\n### What is actually happening?\r\nThe `@appear` hook is invoked even though the `transition` doesn't have an `appear` attribute, and hence shouldn't be triggering on appear. The same thing happens if you add `:appear =\"false\"` to the `transition`.\r\n\r\n---\r\nRan into this when building some custom transition components where `appear` should be user-controlled, but was being invoked all the time instead.\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "It's also worth noting that we could improve the docs in this matter: https://vuejs.org/v2/guide/transitions.html#Transitions-on-Initial-Render\r\n\r\nI'm not sure what is exactly expected here, but I would also say no `appear` events if `appear=false`. It seems there is a `done` callback to it", "created_at": "2019-01-15T08:48:32Z" }, { "body": "Hey guys! I am learning this code base so I have implemented a fix for that without knowing if that would be ever needed, feel free to review it if you find it useful or disregard if not. Thanks!", "created_at": "2019-01-25T01:44:09Z" }, { "body": "I think this is not a code bug, but the document needs to be described more clearly.\r\n\r\nBoth `appear attribute` and `@appear hook` will cause an appear transition, and `@appear hook` will have a higher priority. If you don't want an appear transition, don't specify `@appear hook`. Because `@appear hook` has a higher priority, using `:appear=false` does not cancel the transition.", "created_at": "2019-03-11T07:22:26Z" } ], "number": 9320, "title": "Transition's @appear hook invoked even when appear not specified / falsy" }
{ "body": "<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nWith this change will be possible to set `appear` to `false` and disable it dynamically. fix #9320\r\n\r\n### Appear set to false\r\n![image](https://user-images.githubusercontent.com/4977614/51719568-16d42900-2031-11e9-9591-2a05321e4342.png)\r\n\r\n### Appear defined but not set \r\n![image](https://user-images.githubusercontent.com/4977614/51719593-33706100-2031-11e9-85cb-230ea21333cb.png)\r\n\r\n### Appear not defined\r\nNotice that consider `appear` to be `False` when absent would require either a change for the corner case or a breaking change\r\n![image](https://user-images.githubusercontent.com/4977614/51719626-53a02000-2031-11e9-9b47-2ffea06e9422.png)\r\n\r\n", "number": 9366, "review_comments": [ { "body": "return typeof attr === 'undefined' || attr === false || attr === 'false'", "created_at": "2019-01-25T02:22:45Z" }, { "body": "and another test for `appear=\"false\"`", "created_at": "2019-01-25T02:24:48Z" }, { "body": "appear is a boolean, passing a string isn't supported", "created_at": "2019-01-25T10:06:01Z" }, { "body": "Bad late night code 😟 \r\n\r\nThanks a lot for point that out!", "created_at": "2019-01-25T12:03:31Z" } ], "title": "Explicitly disable a transition by setting it to false (fix #9320)" }
{ "commits": [ { "message": "fix(transition): Explicitly disable appear\n\nThe @appear hook is being invoked when appear is not specified or is falsy. This change make it\npossible to dynamically disable it by setting :appear=\"false\" when needed." }, { "message": "fix(transition): Improve active attributes filter implementation" } ], "files": [ { "diff": "@@ -40,6 +40,11 @@ function getRealChild (vnode: ?VNode): ?VNode {\n }\n }\n \n+function activeAttr (k: string): boolean {\n+ // assuming active if undefined\n+ return this[camelize(k)] === undefined || this[camelize(k)] === true\n+}\n+\n export function extractTransitionData (comp: Component): Object {\n const data = {}\n const options: ComponentOptions = comp.$options\n@@ -49,9 +54,11 @@ export function extractTransitionData (comp: Component): Object {\n }\n // events.\n // extract listeners and pass them directly to the transition methods\n- const listeners: ?Object = options._parentListeners\n- for (const key in listeners) {\n- data[camelize(key)] = listeners[key]\n+ const listeners: Object = options._parentListeners || {}\n+ // remove the listeners that are explicitily disabled\n+ const listenerKeys: Array<string> = Object.keys(listeners).filter(activeAttr, data)\n+ for (let i = 0; i < listenerKeys.length; i++) {\n+ data[camelize(listenerKeys[i])] = listeners[listenerKeys[i]]\n }\n return data\n }", "filename": "src/platforms/web/runtime/components/transition.js", "status": "modified" }, { "diff": "@@ -769,6 +769,30 @@ if (!isIE9) {\n }).then(done)\n })\n \n+ it('appear: false', done => {\n+ let next\n+ const vm = new Vue({\n+ template: `\n+ <div>\n+ <transition name=\"test\" :appear=\"false\" @appear=\"appear\" >\n+ <div v-if=\"ok\" class=\"test\">foo</div>\n+ </transition>\n+ </div>\n+ `,\n+ data: { ok: true },\n+ methods: {\n+ appear: (el, cb) => {\n+ next = cb\n+ }\n+ }\n+ }).$mount(el)\n+\n+ waitForUpdate(() => {\n+ expect(vm.$el.children[0].className).toBe('test')\n+ expect(next).toBeUndefined()\n+ }).then(done)\n+ })\n+\n it('transition on SVG elements', done => {\n const vm = new Vue({\n template: `", "filename": "test/unit/features/transition/transition.spec.js", "status": "modified" } ] }
{ "body": "**Note: this issue only refers to v-model for components, and is not considering v-model on DOM elements.** There was a discussion which included this unexpected behavior in #6216 , and also a PR #6327 that tried to fixed that, but those also included being able to pass-through `v-model` to DOM elements, which is not the case with this issue.\r\n\r\n### Version\r\n2.5.22\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/933003yx6w](https://codesandbox.io/s/933003yx6w)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\nUse `v-model` on a component that doesn't have `value` defined in its `props`.\r\n\r\n### What is expected?\r\n`value` is added to component's instance `$attrs`\r\n\r\n### What is actually happening?\r\n`value` is not added to component's instance `$attrs`\r\n\r\n---\r\nBecause of that, when wrapping **a component** and passing through the props and event handlers using `v-bind=\"$attrs\" v-on=\"$listeners\"`, `v-model` will not be (completely) passed through, and will require explicitly defining `value` in wrapping component's `props` and passing it using `:value=\"value\"` to the wrapped component (see the linked sandbox).\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I already created a test that highlights the issue, and fixed it like so:\r\n```diff\r\n--- a/src/core/vdom/create-component.js\r\n+++ b/src/core/vdom/create-component.js\r\n@@ -250,7 +250,8 @@ function mergeHook (f1: any, f2: any): Function {\r\n function transformModel (options, data: any) {\r\n const prop = (options.model && options.model.prop) || 'value'\r\n const event = (options.model && options.model.event) || 'input'\r\n- ;(data.props || (data.props = {}))[prop] = data.model.value\r\n+ const addTo = (options.props && prop in options.props) ? 'props' : 'attrs'\r\n+ ;(data[addTo] || (data[addTo] = {}))[prop] = data.model.value\r\n const on = data.on || (data.on = {})\r\n const existing = on[event]\r\n const callback = data.model.callback\r\n```\r\n\r\nIf you agree this is actually a bug that needs fixing, I can quickly open a PR.", "created_at": "2019-01-16T16:52:09Z" }, { "body": "yes, go ahead and create the PR. This would be more consistant. It already works with `$listeners` because they are bound to the `on` property", "created_at": "2019-01-16T23:30:30Z" } ], "number": 9330, "title": "v-model's value not in $attrs if value not defined as a prop" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nFixes #9330 , see that for details.", "number": 9331, "review_comments": [], "title": "fix(v-model): add value to $attrs if not defined in props (fix #9330)" }
{ "commits": [ { "message": "fix(v-model): add failing test for #9330." }, { "message": "fix(v-model): add value to $attrs if not defined in props (fix #9330)" } ], "files": [ { "diff": "@@ -250,7 +250,8 @@ function mergeHook (f1: any, f2: any): Function {\n function transformModel (options, data: any) {\n const prop = (options.model && options.model.prop) || 'value'\n const event = (options.model && options.model.event) || 'input'\n- ;(data.props || (data.props = {}))[prop] = data.model.value\n+ const addTo = (options.props && prop in options.props) ? 'props' : 'attrs'\n+ ;(data[addTo] || (data[addTo] = {}))[prop] = data.model.value\n const on = data.on || (data.on = {})\n const existing = on[event]\n const callback = data.model.callback", "filename": "src/core/vdom/create-component.js", "status": "modified" }, { "diff": "@@ -204,4 +204,30 @@ describe('Directive v-model component', () => {\n expect(triggerCount).toBe(1)\n document.body.removeChild(vm.$el)\n })\n+\n+ // #9330\n+ it('should add value to $attrs if not defined in props', () => {\n+ const TestComponent = {\n+ inheritAttrs: false,\n+ render (h) {\n+ return h('div', this.$attrs.value)\n+ }\n+ }\n+\n+ const vm = new Vue({\n+ components: {\n+ TestComponent\n+ },\n+ template: `\n+ <div>\n+ <test-component v-model=\"val\"/>\n+ </div>\n+ `,\n+ data: {\n+ val: 'foo'\n+ }\n+ }).$mount()\n+\n+ expect(vm.$el.innerHTML).toBe('<div>foo</div>');\n+ })\n })", "filename": "test/unit/features/directives/model-component.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.2\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/ympx4pk72z](https://codesandbox.io/s/ympx4pk72z)\r\n\r\n### Steps to reproduce\r\n1、click the home link\r\n2、click the test link\r\n\r\n### What is expected?\r\nafter the Home component destroyed,its updated hook should not be called\r\n\r\n### What is actually happening?\r\nafter the Home component destroyed,its updated hook is called\r\n\r\n---\r\nhere is the log information\r\n\r\nHome beforeCreate\r\nHome created\r\nHome beforeMount\r\nHome mounted\r\nTest beforeCreate\r\nTest created\r\nTest beforeMount\r\nHome beforeDestroy\r\nHome destroyed\r\nTest mounted\r\nHome updated\r\n\r\nwhy the Home component can call updated hook after it is destroyed?\r\nIs Vue designed to do so?\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "@BL-Lac149597870 In your reproduction link, you have not defined your components as `.vue` component. ", "created_at": "2018-04-25T15:38:02Z" }, { "body": "@mbj36 my bad.Now i have just corrected the mistake and changed the repo link,but the problem still remains...", "created_at": "2018-04-26T00:54:37Z" }, { "body": "@BL-Lac149597870 It's always better to use computed property or watcher - See the api docs here - https://vuejs.org/v2/api/#updated", "created_at": "2018-04-26T05:27:01Z" }, { "body": "@mbj36 Thank you for your reply. But I didn't perform any DOM-dependent operations when the updated hook was called.And according to Vue lifecycle,after one component is destroyed,its updated hook shouldn't be called.But in my repo,this condition happens.I can't figure out how this happens.", "created_at": "2018-04-26T05:56:45Z" }, { "body": "Met this problem, too.\r\nAny news for this?😂\r\n @BL-Lac149597870 ", "created_at": "2018-05-14T07:01:08Z" }, { "body": "@shockw4ver Maybe we can work it out together,hahaha~", "created_at": "2018-05-22T06:10:59Z" }, { "body": "I hate to bring bad news, but the fix only triggers the update part of the lifecycle, leaving `beforeUpdate` still being invoked after the component is destroyed.\r\n\r\nUse the sandbox above with ` \"vue\": \"^2.5.18-beta.0\",` to verify\r\n(xposted on the PR accepted)", "created_at": "2018-12-07T10:35:40Z" } ], "number": 8076, "title": "component calls update hook after it is destroyed" }
{ "body": "…stroyed, fix #8076\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [X] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [X] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [X] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [X] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [X] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [X] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nThis properly fixes #8076 since PR #8381 did not cover beforeUpdate.", "number": 9171, "review_comments": [ { "body": "I think you meant `beforeUpdate`\r\n```suggestion\r\n const beforeUpdate = jasmine.createSpy('beforeUpdate')\r\n```", "created_at": "2018-12-10T10:55:41Z" }, { "body": "```suggestion\r\n expect(beforeUpdate).not.toHaveBeenCalled()\r\n```", "created_at": "2018-12-10T10:56:16Z" } ], "title": "fix(lifecycle): beforeUpdated should not be called if component is de…" }
{ "commits": [ { "message": "fix(lifecycle): beforeUpdated should not be called if component is destroyed" }, { "message": "Update test/unit/features/options/lifecycle.spec.js\r\n\r\nfix typo on lifecycle event name (tx @posva)\n\nCo-Authored-By: therealpecus <pecus@pecus.it>" }, { "message": "fix(lifecycle): beforeUpdated should not be called if component is destroyed, fix #8076\n\nfix references to beforeUpdate spy" } ], "files": [ { "diff": "@@ -196,7 +196,7 @@ export function mountComponent (\n // component's mounted hook), which relies on vm._watcher being already defined\n new Watcher(vm, updateComponent, noop, {\n before () {\n- if (vm._isMounted) {\n+ if (vm._isMounted && !vm._isDestroyed) {\n callHook(vm, 'beforeUpdate')\n }\n }", "filename": "src/core/instance/lifecycle.js", "status": "modified" }, { "diff": "@@ -152,6 +152,43 @@ describe('Options lifecycle hooks', () => {\n expect(vm.$el.textContent).toBe('bar!')\n }).then(done)\n })\n+\n+ // #8076\n+ it('should not be called after destroy', done => {\n+ const beforeUpdate = jasmine.createSpy('beforeUpdate')\n+ const destroyed = jasmine.createSpy('destroyed')\n+\n+ Vue.component('todo', {\n+ template: '<div>{{todo.done}}</div>',\n+ props: ['todo'],\n+ destroyed,\n+ beforeUpdate\n+ })\n+\n+ const vm = new Vue({\n+ template: `\n+ <div>\n+ <todo v-for=\"t in pendingTodos\" :todo=\"t\" :key=\"t.id\"></todo>\n+ </div>\n+ `,\n+ data () {\n+ return {\n+ todos: [{ id: 1, done: false }]\n+ }\n+ },\n+ computed: {\n+ pendingTodos () {\n+ return this.todos.filter(t => !t.done)\n+ }\n+ }\n+ }).$mount()\n+\n+ vm.todos[0].done = true\n+ waitForUpdate(() => {\n+ expect(destroyed).toHaveBeenCalled()\n+ expect(beforeUpdate).not.toHaveBeenCalled()\n+ }).then(done)\n+ })\n })\n \n describe('updated', () => {", "filename": "test/unit/features/options/lifecycle.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.17\r\n\r\n### Reproduction link\r\n[https://github.com/sqal/vue-ssr-bug-reproduction](https://github.com/sqal/vue-ssr-bug-reproduction)\r\n\r\n\r\n\r\n\r\n\r\n### Steps to reproduce\r\n- Clone, install dependencies\r\n- run `npm run ssr:serve`\r\n\r\n\r\n\r\n### What is expected?\r\nShould render the app.\r\n\r\n### What is actually happening?\r\nWhen you start the server you should see that application throws following error: `Cannot read property '$store' of undefined`. This happens because in SSR computed property don't receive component's instance as the first argument, therefore I cannot access `$store` property. I am not sure if this is a bug or not, I think it is because i could not find anything about in vue or vue-ssr docs. I would appreciate if someone could clarify this issue. Thank you :)\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "I was sure I opened an issue or PR about this but apparently I didn't 😆 ", "created_at": "2018-10-22T17:44:22Z" } ], "number": 8977, "title": "Computed property don't receive component's instance as first argument in SSR" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [ ] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [ ] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\n**Other information:**\r\n\r\nFixes #8977", "number": 9090, "review_comments": [], "title": "fix(ssr): computed properties first argument" }
{ "commits": [ { "message": "fix(ssr): computed properties first argument" } ], "files": [ { "diff": "@@ -216,13 +216,13 @@ export function defineComputed (\n if (typeof userDef === 'function') {\n sharedPropertyDefinition.get = shouldCache\n ? createComputedGetter(key)\n- : userDef\n+ : createGetterInvoker(userDef)\n sharedPropertyDefinition.set = noop\n } else {\n sharedPropertyDefinition.get = userDef.get\n ? shouldCache && userDef.cache !== false\n ? createComputedGetter(key)\n- : userDef.get\n+ : createGetterInvoker(userDef.get)\n : noop\n sharedPropertyDefinition.set = userDef.set\n ? userDef.set\n@@ -255,6 +255,12 @@ function createComputedGetter (key) {\n }\n }\n \n+function createGetterInvoker(fn) {\n+ return function computedGetter () {\n+ return fn.call(this, this)\n+ }\n+}\n+\n function initMethods (vm: Component, methods: Object) {\n const props = vm.$options.props\n for (const key in methods) {", "filename": "src/core/instance/state.js", "status": "modified" }, { "diff": "@@ -1076,6 +1076,24 @@ describe('SSR: renderToString', () => {\n })\n })\n \n+ // #8977\n+ it('should call computed properties with vm as first argument', done => {\n+ renderToString(new Vue({\n+ data: {\n+ firstName: 'Evan',\n+ lastName: 'You'\n+ },\n+ computed: {\n+ fullName: ({ firstName, lastName }) => `${firstName} ${lastName}`,\n+ },\n+ template: '<div>{{ fullName }}</div>',\n+ }), (err, result) => {\n+ expect(err).toBeNull()\n+ expect(result).toContain('<div data-server-rendered=\"true\">Evan You</div>')\n+ done()\n+ })\n+ })\n+\n it('return Promise', done => {\n renderToString(new Vue({\n template: `<div>{{ foo }}</div>`,", "filename": "test/ssr/ssr-string.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.16\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/wkpy6x5r25](https://codesandbox.io/s/wkpy6x5r25)\r\n\r\n### Steps to reproduce\r\nSimply opening and running the sandbox should show the error. Instead of getting an alert that App.vue caught the error in HelloWorld's watcher, the error escapes all the way to the top level. To see the expected behavior, open Hello World and set the watcher to immediate: false. Then hit the button to trigger the watcher. In this case the error is caught by the parent, App.vue. In the immediate case, this does not happen.\r\n\r\n### What is expected?\r\nErrors in immediate watchers should be caught by the parent component(s)'s errorCaptured().\r\n\r\n### What is actually happening?\r\nThe error escapes to the nextTick hander. This handler has no ctx or vm to pass to the errorHandler. When errorHandler does not receive a vm, it cannot call the parent errorCaptured handlers.\r\n\r\n---\r\nThis appears to be an oversight in src/core/instance/state.js. In the case options.immediate is true, cb.call(vm, watcher.value) should be wrapped in a try catch that explicitly calls handleError with the vm.\r\n\r\nA proposed fix:\r\n```\r\n if (options.immediate) {\r\n try {\r\n cb.call(vm, watcher.value)\r\n } catch (e) {\r\n handleError(e, vm, `immediate call for watcher ${watcher.expression}`)\r\n }\r\n }\r\n```\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [], "number": 8567, "title": "Immediate watcher skips parent(s) errorCaptured and jumps straight to the top error handler" }
{ "body": "As stated in the [related issue](https://github.com/vuejs/vue/issues/8567), the handle callback call should be wrapped in a try/catch that explicitly calls handleError.\r\n\r\nfix #8567\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nThis is my first PR so feel free to provide feedback or request modifications both in the code and the PR itself! 😄 ", "number": 8581, "review_comments": [ { "body": "Should we call `watcher.getAndInvoke` @yyx990803 ?", "created_at": "2018-07-31T10:13:40Z" } ], "title": "fix(#8567): handle errors on immediate watchers" }
{ "commits": [ { "message": "fix(error handling): handle errors on immediate watcher execution\n\nThe handle callback call should be wrapped in a try/catch that explicitly calls handleError\n\nfix #8567" } ], "files": [ { "diff": "@@ -346,7 +346,11 @@ export function stateMixin (Vue: Class<Component>) {\n options.user = true\n const watcher = new Watcher(vm, expOrFn, cb, options)\n if (options.immediate) {\n- cb.call(vm, watcher.value)\n+ try {\n+ cb.call(vm, watcher.value)\n+ } catch (error) {\n+ handleError(error, vm, `callback for immediate watcher \"${watcher.expression}\"`)\n+ }\n }\n return function unwatchFn () {\n watcher.teardown()", "filename": "src/core/instance/state.js", "status": "modified" }, { "diff": "@@ -92,6 +92,16 @@ describe('Error handling', () => {\n }).then(done)\n })\n \n+ it('should recover from errors in user immediate watcher callback', done => {\n+ const vm = createTestInstance(components.userImmediateWatcherCallback)\n+ waitForUpdate(() => {\n+ expect(`Error in callback for immediate watcher \"n\"`).toHaveBeenWarned()\n+ expect(`Error: userImmediateWatcherCallback error`).toHaveBeenWarned()\n+ }).thenWaitFor(next => {\n+ assertBothInstancesActive(vm).end(next)\n+ }).then(done)\n+ })\n+\n it('config.errorHandler should capture render errors', done => {\n const spy = Vue.config.errorHandler = jasmine.createSpy('errorHandler')\n const vm = createTestInstance(components.render)\n@@ -234,6 +244,21 @@ function createErrorTestComponents () {\n }\n }\n \n+ components.userImmediateWatcherCallback = {\n+ props: ['n'],\n+ watch: {\n+ n: {\n+ immediate: true,\n+ handler () {\n+ throw new Error('userImmediateWatcherCallback error')\n+ }\n+ }\n+ },\n+ render (h) {\n+ return h('div', this.n)\n+ }\n+ }\n+\n // event errors\n components.event = {\n beforeCreate () {", "filename": "test/unit/features/error-handling.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.16\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/hL0rrbs9/6/](https://jsfiddle.net/hL0rrbs9/6/)\r\n\r\n### Steps to reproduce\r\nRun code, and watch.\r\n\r\n### What is expected?\r\n\"Yay Yay ;)\" values should not change to \"hell naw!\" in 3 seconds.\r\n\r\n### What is actually happening?\r\n\"Yay Yay ;)\" values are changing in to \"hell naw!\" in 3 seconds.\r\n\r\n---\r\n<h2>\r\n Wait 3 seconds. Behavior is not consistent. \"component\" tag in v-for should not change. \r\n</h2>\r\n<div id=\"app\">\r\n <component :is=\"comp\" v-once></component>\r\n <p v-once>{{comp}}</p>\r\n \r\n <div v-for=\"n in items\" :key=\"n.id\">\r\n <component :is=\"comp\" v-once></component>\r\n <p v-once>{{comp}}</p>\r\n </div>\r\n</div>\r\n<script>\r\nvar z = new Vue({\r\n\tel: \"#app\",\r\n data: {\r\n \tcomp:\t\"comp1\",\r\n items: [{id:1}, {id:2}]\r\n },\r\n components: {\r\n \t\"comp1\": {\r\n \ttemplate: \"<p style='background:green;color:white'>yay yay ;)</p>\"\r\n },\r\n \t\"comp2\": {\r\n \ttemplate: \"<p style='background:red;color:white'>hell naw!</p>\"\r\n }\r\n }\r\n});\r\n\r\nsetTimeout(function() {\r\n\tz.comp = \"comp2\"\r\n}, 3000);\r\n</script>\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "`v-once` should be in `v-for`, not in its children.", "created_at": "2018-04-15T12:31:59Z" }, { "body": "1. If v-once is in v-for, the iteration is rendered once. Ex: If there are 2 children originally, and a child is added later, the 3rd child is not rendered. This is not the scenario.\r\n\r\n2. if :key is provided to v-for, the idea is to patch each element in-place. Ex: In 1st item's example, the existing 2 children are not re-rendered. Hence the binding persists.\r\n\r\n3. In my JSFiddle example, **_<p v-once>{{comp}}</p>_** is behaving as expected in v-for. v-once is placed in v-for's child. \"comp1\" text does not change to \"comp2\". \r\n\r\n4. In my JSFiddle example, **_component_** is **NOT** behaving as expected in v-for. v-once is placed in v-for's child. \"yay yay ;)\" text does change to \"hell naw!\". This is the bug.\r\n\r\nGiven above, the expected behavior for v-for to still be bound to data and render when children array count changes. But each child to render only once. In this scenario, the v-once behavior is required in children, not the parent.\r\n\r\nIn short, this behavior is currently consistent except for **component** element. JS line 10 is working fine. Line 9 is not.\r\n\r\n\r\n", "created_at": "2018-04-15T13:07:37Z" } ], "number": 8021, "title": "v-once for component tag doesn't work in v-for" }
{ "body": "<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\nif change `comp1` to `comp2`, `<component :is='comp'>` vnode's `tag` will be changed, but `<p>` won't, `tag` is not equal will not be as same vnode. \r\nI am not sure it cause other bugs, forgive me. \r\nthanks for your time.\r\n\r\nClose #8021", "number": 8499, "review_comments": [], "title": "fix(patch): static tree as same vnode(#8021)" }
{ "commits": [ { "message": "fix(patch): static tree should ba as same vnode(#8021)" } ], "files": [ { "diff": "@@ -44,7 +44,7 @@ function sameVnode (a, b) {\n isTrue(a.isAsyncPlaceholder) &&\n a.asyncFactory === b.asyncFactory &&\n isUndef(b.asyncFactory.error)\n- )\n+ ) || sameStaticVnode(a, b)\n )\n )\n }\n@@ -57,6 +57,12 @@ function sameInputType (a, b) {\n return typeA === typeB || isTextInputType(typeA) && isTextInputType(typeB)\n }\n \n+function sameStaticVnode (a, b) {\n+ return isTrue(a.isStatic) &&\n+ isTrue(b.isStatic) &&\n+ (isTrue(b.isCloned) || isTrue(b.isOnce))\n+}\n+\n function createKeyToOldIdx (children, beginIdx, endIdx) {\n let i, key\n const map = {}\n@@ -518,10 +524,9 @@ export function createPatchFunction (backend) {\n // note we only do this if the vnode is cloned -\n // if the new node is not cloned it means the render functions have been\n // reset by the hot-reload-api and we need to do a proper re-render.\n- if (isTrue(vnode.isStatic) &&\n- isTrue(oldVnode.isStatic) &&\n- vnode.key === oldVnode.key &&\n- (isTrue(vnode.isCloned) || isTrue(vnode.isOnce))\n+ if (\n+ sameStaticVnode(oldVnode, vnode) &&\n+ vnode.key === oldVnode.key\n ) {\n vnode.componentInstance = oldVnode.componentInstance\n return", "filename": "src/core/vdom/patch.js", "status": "modified" }, { "diff": "@@ -176,6 +176,42 @@ describe('Directive v-once', () => {\n }).then(done)\n })\n \n+ it('should work inside v-for in component', done => {\n+ const vm = new Vue({\n+ data: {\n+ comp: 'comp1',\n+ list: [\n+ { id: 0 }\n+ ]\n+ },\n+ components: {\n+ comp1: {\n+ template: '<span>comp1</span>'\n+ },\n+ comp2: {\n+ template: '<span>comp2</span>'\n+ }\n+ },\n+ template: `\n+ <div>\n+ <div v-for=\"i in list\" :key=\"i.id\">\n+ <component :is='comp' v-once></component>\n+ </div>\n+ </div>\n+ `\n+ }).$mount()\n+\n+ expect(vm.$el.textContent).toBe('comp1')\n+\n+ vm.comp = 'comp2'\n+ waitForUpdate(() => {\n+ expect(vm.$el.textContent).toBe('comp1')\n+ vm.list.push({ id: 1 })\n+ }).then(() => {\n+ expect(vm.$el.textContent).toBe('comp1comp2')\n+ }).then(done)\n+ })\n+\n it('should work inside v-for with v-if', done => {\n const vm = new Vue({\n data: {", "filename": "test/unit/features/directives/once.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.4.2\r\n\r\n### Reproduction link\r\n[https://jsbin.com/qejofexedo/edit?html,js,output](https://jsbin.com/qejofexedo/edit?html,js,output)\r\n\r\n### Steps to reproduce\r\nsee reproduction link.\r\n\r\n### What is expected?\r\nWhen I click `Expand is True`, then `expand` to become `false`. And only `countA` changed.\r\n\r\n### What is actually happening?\r\nWhen I click `Expand is Ture`, nothing happened.\r\nThe `countA` and `countB` changed.\r\nI guess when I click, `expand` changed to `false`, but immediate the click event triggered. It executes another vnode click event. Then expand changed to `true`.\r\n\r\n### And More\r\n\r\n* If I rename the second div to another tag name, such as `p`, `section`, no errors occur.\r\n* If I move click event from `i` tag to parent `div` tag in the first div, no errors occur\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "![qq20170911-185025](https://user-images.githubusercontent.com/6831019/30271250-26f0e358-9722-11e7-8475-ec9f85d8b43b.png)\r\nseems normal", "created_at": "2017-09-11T10:52:07Z" }, { "body": "Your repro is working as intended...", "created_at": "2017-09-11T14:32:38Z" }, { "body": "@Kingwl @yyx990803 Sorry about that. I test others case and forgot to change back.\r\n\r\nThe important code is\r\n```html\r\n<div class=\"header\" v-if=\"expand\"> // block 1\r\n <i @click=\"expand = false, countA++\">Expand is True</i> // element 1\r\n</div>\r\n<div class=\"expand\" v-if=\"!expand\" @click=\"expand = true, countB++\"> // block 2\r\n <i>Expand is False</i> // element 2\r\n</div>\r\n```\r\n\r\nThere is four case:\r\n\r\n* click event listen on `block 1` and `block2`, works well\r\n* click event listen on `element 1` and `element 2`, works well\r\n* click event listen on `block 1` and `element 2`, change `expand` to true is ok. But cannot change back.\r\n* click event listen on `element 1` and `block 2`, cannot change `expand` to false. But can change `expand` to true.", "created_at": "2017-09-11T15:47:10Z" }, { "body": "So, this happens because:\r\n\r\n- The inner click event on `<i>` fires, triggering a 1st update on nextTick (microtask)\r\n- **The microtask is processed before the event bubbles to the outer div**. During the update, a click listener is added to the outer div.\r\n- Because the DOM structure is the same, both the outer div and the inner element are reused.\r\n- The event finally reaches outer div, triggers the listener added by the 1st update, in turn triggering a 2nd update.\r\n\r\nThis is quite tricky in fix, and other libs that leverages microtask for update queueing also have this problem (e.g. Preact). React doesn't seem to have this problem because they use a synthetic event system (probably due to edge cases like this).\r\n\r\nTo work around it, you can simply give the two outer divs different keys to force them to be replaced during updates. This would prevent the bubbled event to be picked up:\r\n\r\n``` html\r\n<div class=\"header\" v-if=\"expand\" key=\"1\"> // block 1\r\n <i @click=\"expand = false, countA++\">Expand is True</i> // element 1\r\n</div>\r\n<div class=\"expand\" v-if=\"!expand\" @click=\"expand = true, countB++\" key=\"2\"> // block 2\r\n <i>Expand is False</i> // element 2\r\n</div>\r\n```", "created_at": "2017-09-13T04:44:54Z" } ], "number": 6566, "title": "@click would trigger event other vnode @click event." }
{ "body": "fix #7109, #7546, #7707, #7834, #8109\r\nreopen #6566\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 8450, "review_comments": [], "title": "fix: always use microtasks for nextTick" }
{ "commits": [ { "message": "fix: always use microtasks for nextTick\n\nfix #7109, #7546, #7707, #7834, #8109\nreopen #6566" }, { "message": "Merge branch '2.6' into revert-nexttick" } ], "files": [ { "diff": "@@ -1,9 +1,9 @@\n /* @flow */\n-/* globals MessageChannel */\n+/* globals MutationObserver */\n \n import { noop } from 'shared/util'\n import { handleError } from './error'\n-import { isIOS, isNative } from './env'\n+import { isIE, isIOS, isNative } from './env'\n \n const callbacks = []\n let pending = false\n@@ -17,76 +17,67 @@ function flushCallbacks () {\n }\n }\n \n-// Here we have async deferring wrappers using both microtasks and (macro) tasks.\n-// In < 2.4 we used microtasks everywhere, but there are some scenarios where\n-// microtasks have too high a priority and fire in between supposedly\n-// sequential events (e.g. #4521, #6690) or even between bubbling of the same\n-// event (#6566). However, using (macro) tasks everywhere also has subtle problems\n-// when state is changed right before repaint (e.g. #6813, out-in transitions).\n-// Here we use microtask by default, but expose a way to force (macro) task when\n-// needed (e.g. in event handlers attached by v-on).\n-let microTimerFunc\n-let macroTimerFunc\n-let useMacroTask = false\n+// Here we have async deferring wrappers using microtasks.\n+// In 2.5 we used (macro) tasks (in combination with microtasks).\n+// However, it has subtle problems when state is changed right before repaint\n+// (e.g. #6813, out-in transitions).\n+// Also, using (macro) tasks in event handler would cause some weird behaviors\n+// that cannot be circumvented (e.g. #7109, #7153, #7546, #7834, #8109).\n+// So we now use microtasks everywhere, again.\n+// A major drawback of this tradeoff is that there are some scenarios\n+// where microtasks have too high a priority and fire in between supposedly\n+// sequential events (e.g. #4521, #6690, which have workarounds)\n+// or even between bubbling of the same event (#6566).\n+let timerFunc\n \n-// Determine (macro) task defer implementation.\n-// Technically setImmediate should be the ideal choice, but it's only available\n-// in IE. The only polyfill that consistently queues the callback after all DOM\n-// events triggered in the same loop is by using MessageChannel.\n-/* istanbul ignore if */\n-if (typeof setImmediate !== 'undefined' && isNative(setImmediate)) {\n- macroTimerFunc = () => {\n- setImmediate(flushCallbacks)\n- }\n-} else if (typeof MessageChannel !== 'undefined' && (\n- isNative(MessageChannel) ||\n- // PhantomJS\n- MessageChannel.toString() === '[object MessageChannelConstructor]'\n-)) {\n- const channel = new MessageChannel()\n- const port = channel.port2\n- channel.port1.onmessage = flushCallbacks\n- macroTimerFunc = () => {\n- port.postMessage(1)\n- }\n-} else {\n- /* istanbul ignore next */\n- macroTimerFunc = () => {\n- setTimeout(flushCallbacks, 0)\n- }\n-}\n-\n-// Determine microtask defer implementation.\n+// The nextTick behavior leverages the microtask queue, which can be accessed\n+// via either native Promise.then or MutationObserver.\n+// MutationObserver has wider support, however it is seriously bugged in\n+// UIWebView in iOS >= 9.3.3 when triggered in touch event handlers. It\n+// completely stops working after triggering a few times... 
so, if native\n+// Promise is available, we will use it:\n /* istanbul ignore next, $flow-disable-line */\n if (typeof Promise !== 'undefined' && isNative(Promise)) {\n const p = Promise.resolve()\n- microTimerFunc = () => {\n+ timerFunc = () => {\n p.then(flushCallbacks)\n- // in problematic UIWebViews, Promise.then doesn't completely break, but\n+ // In problematic UIWebViews, Promise.then doesn't completely break, but\n // it can get stuck in a weird state where callbacks are pushed into the\n // microtask queue but the queue isn't being flushed, until the browser\n // needs to do some other work, e.g. handle a timer. Therefore we can\n // \"force\" the microtask queue to be flushed by adding an empty timer.\n if (isIOS) setTimeout(noop)\n }\n-} else {\n- // fallback to macro\n- microTimerFunc = macroTimerFunc\n-}\n-\n-/**\n- * Wrap a function so that if any code inside triggers state change,\n- * the changes are queued using a (macro) task instead of a microtask.\n- */\n-export function withMacroTask (fn: Function): Function {\n- return fn._withTask || (fn._withTask = function () {\n- useMacroTask = true\n- try {\n- return fn.apply(null, arguments)\n- } finally {\n- useMacroTask = false \n- }\n+} else if (!isIE && typeof MutationObserver !== 'undefined' && (\n+ isNative(MutationObserver) ||\n+ // PhantomJS and iOS 7.x\n+ MutationObserver.toString() === '[object MutationObserverConstructor]'\n+)) {\n+ // Use MutationObserver where native Promise is not available,\n+ // e.g. PhantomJS, iOS7, Android 4.4\n+ // (#6466 MutationObserver is unreliable in IE11)\n+ let counter = 1\n+ const observer = new MutationObserver(flushCallbacks)\n+ const textNode = document.createTextNode(String(counter))\n+ observer.observe(textNode, {\n+ characterData: true\n })\n+ timerFunc = () => {\n+ counter = (counter + 1) % 2\n+ textNode.data = String(counter)\n+ }\n+} else if (typeof setImmediate !== 'undefined' && isNative(setImmediate)) {\n+ // Fallback to setImmediate.\n+ // Techinically it leverages the (macro) task queue,\n+ // but it is still a better choice than setTimeout.\n+ timerFunc = () => {\n+ setImmediate(flushCallbacks)\n+ }\n+} else {\n+ // Fallback to setTimeout.\n+ timerFunc = () => {\n+ setTimeout(flushCallbacks, 0)\n+ }\n }\n \n export function nextTick (cb?: Function, ctx?: Object) {\n@@ -104,11 +95,7 @@ export function nextTick (cb?: Function, ctx?: Object) {\n })\n if (!pending) {\n pending = true\n- if (useMacroTask) {\n- macroTimerFunc()\n- } else {\n- microTimerFunc()\n- }\n+ timerFunc()\n }\n // $flow-disable-line\n if (!cb && typeof Promise !== 'undefined') {", "filename": "src/core/util/next-tick.js", "status": "modified" }, { "diff": "@@ -35,6 +35,17 @@ function updateDOMProps (oldVnode: VNodeWithData, vnode: VNodeWithData) {\n }\n }\n \n+ // #4521: if a click event triggers update before the change event is\n+ // dispatched on a checkbox/radio input, the input's checked state will\n+ // be reset and fail to trigger another update.\n+ // The root cause here is that browsers may fire microtasks in between click/change.\n+ // In Chrome / Firefox, click event fires before change, thus having this problem.\n+ // In Safari / Edge, the order is opposite.\n+ // Note: in Edge, if you click too fast, only the click event would fire twice.\n+ if (key === 'checked' && !isNotInFocusAndDirty(elm, cur)) {\n+ continue\n+ }\n+\n if (key === 'value') {\n // store value as _value as well since\n // non-string values will be stringified", "filename": "src/platforms/web/runtime/modules/dom-props.js", 
"status": "modified" }, { "diff": "@@ -2,7 +2,7 @@\n \n import { isDef, isUndef } from 'shared/util'\n import { updateListeners } from 'core/vdom/helpers/index'\n-import { withMacroTask, isIE, supportsPassive } from 'core/util/index'\n+import { isIE, supportsPassive } from 'core/util/index'\n import { RANGE_TOKEN, CHECKBOX_RADIO_TOKEN } from 'web/compiler/directives/model'\n \n // normalize v-model event tokens that can only be determined at runtime.\n@@ -44,7 +44,6 @@ function add (\n capture: boolean,\n passive: boolean\n ) {\n- handler = withMacroTask(handler)\n target.addEventListener(\n event,\n handler,", "filename": "src/platforms/web/runtime/modules/events.js", "status": "modified" }, { "diff": "@@ -14,20 +14,20 @@ module.exports = {\n .assert.containsText('#case-1', '3')\n .assert.checked('#case-1 input', false)\n \n- // #6566\n- .assert.containsText('#case-2 button', 'Expand is True')\n- .assert.containsText('.count-a', 'countA: 0')\n- .assert.containsText('.count-b', 'countB: 0')\n+ // // #6566\n+ // .assert.containsText('#case-2 button', 'Expand is True')\n+ // .assert.containsText('.count-a', 'countA: 0')\n+ // .assert.containsText('.count-b', 'countB: 0')\n \n- .click('#case-2 button')\n- .assert.containsText('#case-2 button', 'Expand is False')\n- .assert.containsText('.count-a', 'countA: 1')\n- .assert.containsText('.count-b', 'countB: 0')\n+ // .click('#case-2 button')\n+ // .assert.containsText('#case-2 button', 'Expand is False')\n+ // .assert.containsText('.count-a', 'countA: 1')\n+ // .assert.containsText('.count-b', 'countB: 0')\n \n- .click('#case-2 button')\n- .assert.containsText('#case-2 button', 'Expand is True')\n- .assert.containsText('.count-a', 'countA: 1')\n- .assert.containsText('.count-b', 'countB: 1')\n+ // .click('#case-2 button')\n+ // .assert.containsText('#case-2 button', 'Expand is True')\n+ // .assert.containsText('.count-a', 'countA: 1')\n+ // .assert.containsText('.count-b', 'countB: 1')\n \n .end()\n }", "filename": "test/e2e/specs/async-edge-cases.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.16\r\n\r\n### Reproduction link\r\n[https://jsfiddle.net/6sny3yq9/](https://jsfiddle.net/6sny3yq9/)\r\n\r\n### Steps to reproduce\r\n1. Open fiddle and type anything into the first field marked \"broken\". \r\n2. See in the console the error message \"Error in nextTick: \"TypeError: Cannot read property '_pending' of null\"\r\n3. Notice that Vue has stopped working\r\n4. Refresh the fiddle and type anything into the second field marked \"working\"\r\n5. Notice that everything works as expected\r\n\r\n### What is expected?\r\nWhen the value of the input field is \"foo\" the computed property `bar` will evaluate to true and the \"a\" span will display. \r\n\r\nWhen the user changes the value in the input field from \"foo\" to anything else, the computed property `bar` will evaluate to false and the \"b\" span (rendered using v-html) will display.\r\n\r\n### What is actually happening?\r\nInitially, the value of the input field is \"foo\" and the computed property `bar` is evaluated to true, so the \"a\" span is displaying. \r\n\r\nBut, when the user changes the value in the input field from \"foo\" to anything else, Vue crashes and this error message is displayed: Error in nextTick: \"TypeError: Cannot read property '_pending' of null\"\r\n\r\n---\r\nIt takes this very specific scenario for the error to appear. In the fiddle, I've shown that by removing the `@click=\"show = !show\"` event listener, Vue works as expected with no errors.\r\n\r\nHere are three other changes that, when made individually, result in Vue working as expected:\r\n - removing the `<transition>` component surrounding the \"a\" span\r\n - using `<span v-else>{{ b }}</span>` instead of `<span v-else v-html=\"b\"></span>`\r\n - using `v-show=\"bar\"` and `v-show=\"!bar\"` instead of the `v-if=\"bar\"` and `v-else` directives\r\n\r\nFound via this question on Stack Overflow:\r\nhttps://stackoverflow.com/questions/50400131/cannot-read-property-pending-of-null-when-using-v-html-and-transition#\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "As a workaround, you can key the span with the `v-if`", "created_at": "2018-05-18T16:15:49Z" }, { "body": "FYI this bug is introduced in https://github.com/vuejs/vue/commit/0f2cb09444e8b2a5fa41aaf8c94e6f2f43e00c2f & https://github.com/vuejs/vue/commit/2839e31f2b7fe7d14a87d2eade31ceb11800c4ab\r\n\r\nIt only appears in a specific scenario when the root node of a conditional rendering expression is reused and the new root sets its `textContent` or `innerHTML` prop.\r\n\r\nFor reused DOM nodes, Vue calls `updateDOMProps` to patch the real DOM.\r\nBecause of the abovementioned commits, when `v-html` is set, its children gets cleared immediately.\r\nSo before the descendent `transition` element's `leave` hook gets called, its corresponding DOM node has been detached already.\r\nTherefore, an error would be thrown from this line:\r\nhttps://github.com/vuejs/vue/blob/dev/src/platforms/web/runtime/modules/transition.js#L255\r\n\r\nThe DOM node is reused only when `sameVnode` returns true.\r\nhttps://github.com/vuejs/vue/blob/dev/src/core/vdom/patch.js#L35-L43\r\nSince (in the given reproduction) the second `span` has an `v-html` attribute, `isDef(b.data)` returns true. When the first `span` gets a binding (e.g. `@click`, `:class=`, etc.), `isDef(a.data)`returns true, too.", "created_at": "2018-06-26T09:42:50Z" } ], "number": 8199, "title": "\"Cannot read property '_pending' of null\" when using transition component." }
{ "body": "Fix #8199 \r\nIf the new parentNode gets a `textContent` or `innerHTML` property during patching, the `transition` node would have been detached early, which means `el.parentNode` no longer exists.\r\nSo this existential check is necessary.\r\n\r\nThis is an imperfect fix because while errors are suppressed, transition animations still get lost.\r\nIt is due to the different mechanism of `v-html` and VDOM element patching. But fixing that would be much more complex and I don't think we'd bother fixing such an edge case.\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 8422, "review_comments": [], "title": "fix(transition): check existence of `el.parentNode`, fix #8199" }
{ "commits": [ { "message": "fix(transition): check existence of `el.parentNode`, fix #8199\n\nIf the new parentNode gets a `textContent` or `innerHTML` property during\npatching, the `transition` node would have been detached early, which means\n`el.parentNode` no longer exists." }, { "message": "fix(vdom): should not reuse nodes with `textContent` / `innerHTML` props" } ], "files": [ { "diff": "@@ -32,13 +32,20 @@ export const emptyNode = new VNode('', {}, [])\n \n const hooks = ['create', 'activate', 'update', 'remove', 'destroy']\n \n+function childrenIgnored (vnode) {\n+ return vnode && vnode.data && vnode.data.domProps && (\n+ vnode.data.domProps.innerHTML || vnode.data.domProps.textContent\n+ )\n+}\n+\n function sameVnode (a, b) {\n return (\n a.key === b.key && (\n (\n a.tag === b.tag &&\n a.isComment === b.isComment &&\n isDef(a.data) === isDef(b.data) &&\n+ !childrenIgnored(a) && !childrenIgnored(b) &&\n sameInputType(a, b)\n ) || (\n isTrue(a.isAsyncPlaceholder) &&", "filename": "src/core/vdom/patch.js", "status": "modified" }, { "diff": "@@ -251,7 +251,7 @@ export function leave (vnode: VNodeWithData, rm: Function) {\n return\n }\n // record leaving element\n- if (!vnode.data.show) {\n+ if (!vnode.data.show && el.parentNode) {\n (el.parentNode._pending || (el.parentNode._pending = {}))[(vnode.key: any)] = vnode\n }\n beforeLeave && beforeLeave(el)", "filename": "src/platforms/web/runtime/modules/transition.js", "status": "modified" }, { "diff": "@@ -1167,5 +1167,27 @@ if (!isIE9) {\n expect(vm.$el.innerHTML).toBe('<!---->')\n }).then(done)\n })\n+\n+ // #8199\n+ it('should not throw error when replaced by v-html contents', (done) => {\n+ const vm = new Vue({\n+ template: `\n+ <div>\n+ <div v-if=\"ok\" :class=\"ok\">\n+ <transition>\n+ <span>a</span>\n+ </transition>\n+ </div>\n+ <div v-else v-html=\"ok\"></div>\n+ </div>\n+ `,\n+ data: { ok: true }\n+ }).$mount(el)\n+\n+ vm.ok = false\n+ waitForUpdate(() => {\n+ expect(vm.$el.children[0].innerHTML).toBe('false')\n+ }).then(done)\n+ })\n })\n }", "filename": "test/unit/features/transition/transition.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.2\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/ympx4pk72z](https://codesandbox.io/s/ympx4pk72z)\r\n\r\n### Steps to reproduce\r\n1、click the home link\r\n2、click the test link\r\n\r\n### What is expected?\r\nafter the Home component destroyed,its updated hook should not be called\r\n\r\n### What is actually happening?\r\nafter the Home component destroyed,its updated hook is called\r\n\r\n---\r\nhere is the log information\r\n\r\nHome beforeCreate\r\nHome created\r\nHome beforeMount\r\nHome mounted\r\nTest beforeCreate\r\nTest created\r\nTest beforeMount\r\nHome beforeDestroy\r\nHome destroyed\r\nTest mounted\r\nHome updated\r\n\r\nwhy the Home component can call updated hook after it is destroyed?\r\nIs Vue designed to do so?\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "@BL-Lac149597870 In your reproduction link, you have not defined your components as `.vue` component. ", "created_at": "2018-04-25T15:38:02Z" }, { "body": "@mbj36 my bad.Now i have just corrected the mistake and changed the repo link,but the problem still remains...", "created_at": "2018-04-26T00:54:37Z" }, { "body": "@BL-Lac149597870 It's always better to use computed property or watcher - See the api docs here - https://vuejs.org/v2/api/#updated", "created_at": "2018-04-26T05:27:01Z" }, { "body": "@mbj36 Thank you for your reply. But I didn't perform any DOM-dependent operations when the updated hook was called.And according to Vue lifecycle,after one component is destroyed,its updated hook shouldn't be called.But in my repo,this condition happens.I can't figure out how this happens.", "created_at": "2018-04-26T05:56:45Z" }, { "body": "Met this problem, too.\r\nAny news for this?😂\r\n @BL-Lac149597870 ", "created_at": "2018-05-14T07:01:08Z" }, { "body": "@shockw4ver Maybe we can work it out together,hahaha~", "created_at": "2018-05-22T06:10:59Z" }, { "body": "I hate to bring bad news, but the fix only triggers the update part of the lifecycle, leaving `beforeUpdate` still being invoked after the component is destroyed.\r\n\r\nUse the sandbox above with ` \"vue\": \"^2.5.18-beta.0\",` to verify\r\n(xposted on the PR accepted)", "created_at": "2018-12-07T10:35:40Z" } ], "number": 8076, "title": "component calls update hook after it is destroyed" }
{ "body": "…stroyed, fix #8076\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 8381, "review_comments": [], "title": "fix(lifecycle): updated should not be called after component being de…" }
{ "commits": [ { "message": "fix(lifecycle): updated should not be called after component being destroyed, fix #8076" } ], "files": [ { "diff": "@@ -98,7 +98,7 @@ function callUpdatedHooks (queue) {\n while (i--) {\n const watcher = queue[i]\n const vm = watcher.vm\n- if (vm._watcher === watcher && vm._isMounted) {\n+ if (vm._watcher === watcher && vm._isMounted && !vm._isDestroyed) {\n callHook(vm, 'updated')\n }\n }", "filename": "src/core/observer/scheduler.js", "status": "modified" }, { "diff": "@@ -199,6 +199,43 @@ describe('Options lifecycle hooks', () => {\n expect(calls).toEqual(['child', 'parent'])\n }).then(done)\n })\n+\n+ // #8076\n+ it('should not be called after destroy', done => {\n+ const updated = jasmine.createSpy('updated')\n+ const destroyed = jasmine.createSpy('destroyed')\n+\n+ Vue.component('todo', {\n+ template: '<div>{{todo.done}}</div>',\n+ props: ['todo'],\n+ destroyed,\n+ updated\n+ })\n+\n+ const vm = new Vue({\n+ template: `\n+ <div>\n+ <todo v-for=\"t in pendingTodos\" :todo=\"t\" :key=\"t.id\"></todo>\n+ </div>\n+ `,\n+ data () {\n+ return {\n+ todos: [{ id: 1, done: false }]\n+ }\n+ },\n+ computed: {\n+ pendingTodos () {\n+ return this.todos.filter(t => !t.done)\n+ }\n+ }\n+ }).$mount()\n+\n+ vm.todos[0].done = true\n+ waitForUpdate(() => {\n+ expect(destroyed).toHaveBeenCalled()\n+ expect(updated).not.toHaveBeenCalled()\n+ }).then(done)\n+ })\n })\n \n describe('beforeDestroy', () => {", "filename": "test/unit/features/options/lifecycle.spec.js", "status": "modified" } ] }
{ "body": "### Version\r\n2.5.2\r\n\r\n### Reproduction link\r\n[https://codesandbox.io/s/mzvkppmvo8](https://codesandbox.io/s/mzvkppmvo8)\r\n\r\n### Steps to reproduce\r\n1. I created component with scoped slot (AppSwitcher.vue)\r\n2. Then I use it in another component with their own slot (HelloWorld.vue with slot \"subtext\")\r\n3. Add some element to slot (div in App.vue) \r\n\r\n### What is expected?\r\nIt should work without errors\r\n\r\n### What is actually happening?\r\nChanges in AppSwitcher.vue caus \"Duplicate presence of slot \"subtext\" found in the same render tree\" error but there are no duplicates.\r\n\r\n---\r\nAlso, adding `slot-scope` to div in App.vue solves problem and no error there, but why it happens without `slot-scope`?\r\n\r\n<!-- generated by vue-issues. DO NOT REMOVE -->", "comments": [ { "body": "Hello @Kelin2025 \r\n\r\nIn v2.5, there was some changes in `scoped-slot`, you can go through them here - https://gist.github.com/yyx990803/9bdff05e5468a60ced06c29c39114c6b#simplified-scoped-slots-usage\r\n\r\nThis might solve your confusion", "created_at": "2018-03-28T18:59:16Z" }, { "body": "Yeah, I know it, but I don't really understand why I get this error, there are no __duplicates__ of slot but only rerender.", "created_at": "2018-03-28T21:30:56Z" }, { "body": "It looks like a slot inside a scoped slot is rendered more than once.", "created_at": "2018-04-02T09:24:25Z" }, { "body": "@Justineo it's being re-rendered on changes but it shouldn't cause problems, I think :thinking: ", "created_at": "2018-04-02T09:46:25Z" }, { "body": "My 2 cents.\r\n\r\nHere is another reproduction of this issue (https://codesandbox.io/s/m5kl6p97qx). Notice the warning being raised only once the modal is shown more than once. \r\n\r\nOn the other hand, the JSX version of the same component is not raising any warning (https://codesandbox.io/s/k0wpj60z5r).", "created_at": "2018-04-12T14:22:34Z" }, { "body": "I'm experiencing the same thing. Anything that triggers a re-render inside the slot-scope causes a warning about duplicate slot presence.\r\n\r\n[The issue seems to be here on this line here](https://github.com/vuejs/vue/blob/5a9da95b8a865416f082952a48416ffc091e4078/src/core/instance/render-helpers/render-slot.js#L34). Once the slots have been rendered once, subsequent executions of that block have `slotNodes._rendered` as true, presumably from the first render.\r\n\r\nI would imagine something should be setting that back to false when a re-render is triggered, but admittedly I know almost nothing about how Vue works under the hood so that's just a wild guess.\r\n\r\nThis bug seems like an unlikely edge case but it's happened to me a couple of times recently. I'm a big fan of the pattern of writing renderless components that can be paired with concrete implementations, and trying to pass the content from the consumer into the concrete implementation is when this issue is arising.", "created_at": "2018-05-22T22:50:30Z" }, { "body": "try this:\r\nmy-component:\r\n```\r\n<template>\r\n <slot :someprop=\"value\"></slot>\r\n</template>\r\n```\r\n\r\napp:\r\n```\r\n<my-component>\r\n <template slot-scop=\"someprop\">{{ prop }} ... 
and do something else</template>\r\n</my-component>\r\n```\r\n\r\nespecially when \"slot\" in \"v-for\"", "created_at": "2018-08-08T11:00:59Z" }, { "body": "Note that if you're using Vue.js ≤v2.4.x, then you may see this error if you are trying to use `slot-scope` (that's what was going on for me).\r\n\r\nThe solution is to either update Vue.js to ≥2.5 OR use \"scope\" instead of \"slot-scope\":\r\n\r\n![image](https://user-images.githubusercontent.com/618009/45494457-afc76f00-b736-11e8-880b-8e3a25a7f013.png)\r\n_https://vuejs.org/v2/api/#scope-replaced_\r\n\r\nHope that helps anyone else finding this on google like I did!\r\n\r\n\r\n-------------------------------------------------------\r\n\r\nPS. If you're using Vue.js ≤v2.4.x, remember that you'll need to use a `<template>` element -- you can't just put a slot scope boundary on any miscellaneous element until ≥2.5.x. A little real-world example:\r\n\r\n```html\r\n <div class=\"form-group col-md\">\r\n <label>Autocomplete field 4 (w/ custom search results):</label>\r\n <autocomplete v-model=\"autocompleteExampleValue4\" action=\"listGlobalSearchResults\" :handle-formatting-response-data=\"handleFormattingDummySearchResults\" placeholder=\"This one has a custom placeholder too\">\r\n <template slot=\"search-result-item\" scope=\"slotData\"><!-- Note that you can use destructuring here, but it only works in modern browsers -- otherwise you have to define a separate variable -- like scope=\"slotData\" and then use {{slotData.id}}... (see https://vuejs.org/v2/guide/components-slots.html#Destructuring-slot-scope) -->\r\n <!-- TODO: update to vue ≥ 2.5.0 to allow this slotData thing to be attached w/o using a template element. Also when we do that, \"scope\" will become \"slot-scope\". See https://vuejs.org/v2/guide/components-slots.html#Scoped-Slots for more info -->\r\n <span>{{slotData.searchResult.label}}</span>\r\n </template>\r\n </autocomplete>\r\n </div>\r\n```", "created_at": "2018-09-13T14:24:07Z" }, { "body": "> The solution is to either update Vue.js to ≥2.5 OR use \"scope\" instead of \"slot-scope\":\r\n\r\nI still have this issue on `2.5.17`.\r\n\r\nIt's really annoying when trying to build renderless component abstractions, as this comes up quite often.", "created_at": "2018-10-01T15:11:56Z" } ], "number": 7913, "title": "Unexpected \"duplicate presence of slot\"" }
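Editor's note: a rough reconstruction of the reported setup (component and slot names follow the report; the sandbox templates may differ in detail). Any state change inside the scoped slot re-invokes the slot function, and the forwarded `<slot name="subtext">` then trips the duplicate-presence warning even though it appears only once; as noted in the thread, adding `slot-scope` to the forwarded element avoids it.

```js
// Rough reconstruction, assuming Vue 2.5.x; names mirror the report.
// app-switcher exposes state through a scoped slot; hello-world forwards its
// own named "subtext" slot from inside that scoped slot.
Vue.component('app-switcher', {
  data: () => ({ on: false }),
  template: `
    <div>
      <button @click="on = !on">toggle</button>
      <slot :on="on"></slot>
    </div>
  `
})

Vue.component('hello-world', {
  template: `
    <app-switcher>
      <template slot-scope="{ on }">
        <p>switched: {{ on }}</p>
        <slot name="subtext"></slot> <!-- warned as "duplicate" on every re-render -->
      </template>
    </app-switcher>
  `
})

new Vue({
  el: '#app',
  template: `
    <hello-world>
      <div slot="subtext">Some subtext</div>
    </hello-world>
  `
})
```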
{ "body": "Because slotNodes inside a slot-scope context are already set to \\_rendered = true after initial\r\nrender, the warning for duplicate slot presence always fires when a slot-scope prop change triggers\r\na re-render. With this change, the compiler tracks whether any slot-scoped elements have been\r\nencountered yet at the point the slot is compiled. If so, the direct ancestors of the slot are checked\r\nfor slot-scope presence, and if one is found, the warning is supressed.\r\n\r\nThis is admittedly not a perfect solution, as within a slot-scope context the warning now does not fire even when there _are_ duplicate slots, but I couldn't find a good way to get around that.\r\n\r\nfix #7913\r\n\r\n<!--\r\nPlease make sure to read the Pull Request Guidelines:\r\nhttps://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#pull-request-guidelines\r\n-->\r\n\r\n<!-- PULL REQUEST TEMPLATE -->\r\n<!-- (Update \"[ ]\" to \"[x]\" to check a box) -->\r\n\r\n**What kind of change does this PR introduce?** (check at least one)\r\n\r\n- [x] Bugfix\r\n- [ ] Feature\r\n- [ ] Code style update\r\n- [ ] Refactor\r\n- [ ] Build-related changes\r\n- [ ] Other, please describe:\r\n\r\n**Does this PR introduce a breaking change?** (check one)\r\n\r\n- [ ] Yes\r\n- [x] No\r\n\r\nIf yes, please describe the impact and migration path for existing applications:\r\n\r\n**The PR fulfills these requirements:**\r\n\r\n- [x] It's submitted to the `dev` branch for v2.x (or to a previous version branch), _not_ the `master` branch\r\n- [x] When resolving a specific issue, it's referenced in the PR's title (e.g. `fix #xxx[,#xxx]`, where \"xxx\" is the issue number)\r\n- [x] All tests are passing: https://github.com/vuejs/vue/blob/dev/.github/CONTRIBUTING.md#development-setup\r\n- [x] New/updated tests are included\r\n\r\nIf adding a **new feature**, the PR's description includes:\r\n- [ ] A convincing reason for adding this feature (to avoid wasting your time, it's best to open a suggestion issue first and wait for approval before working on it)\r\n\r\n**Other information:**\r\n", "number": 8250, "review_comments": [], "title": "fix(#7913): Prevent erroneous warning when using <slot> inside slot-scope" }
{ "commits": [ { "message": "fix(#7913): Prevent erroneous warning when using <slot> inside slot-scope\n\nBecause slotNodes inside a slot-scope context are already set to _rendered = true after initial\nrender, the warning for duplicate slot presence always fires when a slot-scope prop change triggers\na re-render. With this change, the compiler tracks whether any slot-scoped elements have been\nencountered at the point the slot is compiled. If so, the direct ancestors of the slot are checked\nfor slot-scope presence, and if found, the warning is supressed. This is admittedly not a perfect\nsolution, as within a slot-scope context the warning now does not fire even when there _are_\nduplicate slots, but I couldn't find a good way to get around that.\n\nfix #7913" } ], "files": [ { "diff": "@@ -18,6 +18,7 @@ export class CodegenState {\n maybeComponent: (el: ASTElement) => boolean;\n onceId: number;\n staticRenderFns: Array<string>;\n+ slotScopeinAst: boolean;\n \n constructor (options: CompilerOptions) {\n this.options = options\n@@ -29,6 +30,8 @@ export class CodegenState {\n this.maybeComponent = (el: ASTElement) => !isReservedTag(el.tag)\n this.onceId = 0\n this.staticRenderFns = []\n+ // we can skip checks for slot-scope parents if we haven't seen any\n+ this.slotScopeinAst = false\n }\n }\n \n@@ -50,6 +53,7 @@ export function generate (\n }\n \n export function genElement (el: ASTElement, state: CodegenState): string {\n+ if (el.slotScope) state.slotScopeinAst = true\n if (el.staticRoot && !el.staticProcessed) {\n return genStatic(el, state)\n } else if (el.once && !el.onceProcessed) {\n@@ -456,19 +460,18 @@ export function genComment (comment: ASTText): string {\n function genSlot (el: ASTElement, state: CodegenState): string {\n const slotName = el.slotName || '\"default\"'\n const children = genChildren(el, state)\n- let res = `_t(${slotName}${children ? `,${children}` : ''}`\n const attrs = el.attrs && `{${el.attrs.map(a => `${camelize(a.name)}:${a.value}`).join(',')}}`\n const bind = el.attrsMap['v-bind']\n- if ((attrs || bind) && !children) {\n- res += `,null`\n- }\n- if (attrs) {\n- res += `,${attrs}`\n- }\n- if (bind) {\n- res += `${attrs ? '' : ',null'},${bind}`\n+ let inScopedSlot = false\n+ let ancestor = el\n+ if (process.env.NODE_ENV !== 'production' && state.slotScopeinAst) {\n+ while (!inScopedSlot && ancestor.parent) {\n+ if (ancestor.slotScope) inScopedSlot = true\n+ ancestor = ancestor.parent\n+ }\n }\n- return res + ')'\n+ return `_t(${slotName},${children || 'null'},${attrs || 'null'},` +\n+ `${bind || 'null'},${inScopedSlot ? 
'true' : 'false'})`\n }\n \n // componentName is el.component, take it as argument to shun flow's pessimistic refinement", "filename": "src/compiler/codegen/index.js", "status": "modified" }, { "diff": "@@ -9,7 +9,8 @@ export function renderSlot (\n name: string,\n fallback: ?Array<VNode>,\n props: ?Object,\n- bindObject: ?Object\n+ bindObject: ?Object,\n+ inSlotScope: ?boolean\n ): ?Array<VNode> {\n const scopedSlotFn = this.$scopedSlots[name]\n let nodes\n@@ -29,7 +30,7 @@ export function renderSlot (\n const slotNodes = this.$slots[name]\n // warn duplicate slot usage\n if (slotNodes) {\n- if (process.env.NODE_ENV !== 'production' && slotNodes._rendered) {\n+ if (process.env.NODE_ENV !== 'production' && !inSlotScope && slotNodes._rendered) {\n warn(\n `Duplicate presence of slot \"${name}\" found in the same render tree ` +\n `- this will likely cause render errors.`,", "filename": "src/core/instance/render-helpers/render-slot.js", "status": "modified" }, { "diff": "@@ -161,21 +161,21 @@ describe('codegen', () => {\n it('generate single slot', () => {\n assertCodegen(\n '<div><slot></slot></div>',\n- `with(this){return _c('div',[_t(\"default\")],2)}`\n+ `with(this){return _c('div',[_t(\"default\",null,null,null,false)],2)}`\n )\n })\n \n it('generate named slot', () => {\n assertCodegen(\n '<div><slot name=\"one\"></slot></div>',\n- `with(this){return _c('div',[_t(\"one\")],2)}`\n+ `with(this){return _c('div',[_t(\"one\",null,null,null,false)],2)}`\n )\n })\n \n it('generate slot fallback content', () => {\n assertCodegen(\n '<div><slot><div>hi</div></slot></div>',\n- `with(this){return _c('div',[_t(\"default\",[_c('div',[_v(\"hi\")])])],2)}`\n+ `with(this){return _c('div',[_t(\"default\",[_c('div',[_v(\"hi\")])],null,null,false)],2)}`\n )\n })\n ", "filename": "test/unit/modules/compiler/codegen.spec.js", "status": "modified" } ] }
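Editor's note: at runtime the flag becomes a fifth argument to the `_t` render helper, as the updated codegen tests show. A condensed paraphrase of the patched helper (not the verbatim source; it runs with `this` bound to the component instance) to show where the warning is now skipped:

```js
// Condensed paraphrase of the patched render-slot helper (dev warning path only).
// The compiler now always emits five arguments; `inSlotScope` is true when the
// <slot> sat under a slot-scope ancestor, and the duplicate warning is skipped then.
function renderSlot (name, fallback, props, bindObject, inSlotScope) {
  const scopedSlotFn = this.$scopedSlots[name]
  if (scopedSlotFn) {
    // scoped slot: call the compiled slot function with the merged props
    return scopedSlotFn(Object.assign({}, bindObject, props)) || fallback
  }
  const slotNodes = this.$slots[name]
  if (slotNodes) {
    if (process.env.NODE_ENV !== 'production' && !inSlotScope && slotNodes._rendered) {
      console.warn(`Duplicate presence of slot "${name}" found in the same render tree`)
    }
    slotNodes._rendered = true
  }
  return slotNodes || fallback
}
```

As the PR description concedes, the trade-off is that genuine duplicate slots inside a scoped-slot subtree no longer trigger the warning.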